source: branches/alma/python/scantable.py@ 1519

Last change on this file since 1519 was 1517, checked in by Kana Sugimoto, 16 years ago

New Development: No

JIRA Issue: Yes (CAS-1079)

Ready to Release: Yes

Interface Changes: Yes

What Interface Changed:
The function scantable.pos2data() was renamed to scantable.chan2data(),
and the selection names for the 'stat' parameter of scantable.stats() changed:

NEW:      OLD:
min_abc   minpos
max_abc   maxpos
Test Programs:

You can get min/max values together with their positions
(channels/frequencies/velocities) by selecting stat='min_abc' or 'max_abc',
as in the sketch below.
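
A minimal usage sketch, assuming a scantable has already been loaded (the
file name 'mydata.asap' is illustrative only):

    from asap import scantable
    s = scantable('mydata.asap')        # hypothetical input file
    s.set_unit('km/s')                  # report positions in velocity
    maxvals = s.stats(stat='max_abc')   # per-row maxima; the position
                                        # (x = ... [km/s]) is shown in the
                                        # printed summary
    minvals = s.stats(stat='min_abc')   # per-row minima, likewise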

Put in Release Notes: No

Module(s): scantable.stats()

Description:

Renamed one function (pos2data -> chan2data) and two values of the 'stat'
parameter (minpos -> min_abc, maxpos -> max_abc); a sketch of the renamed
function follows.
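
Continuing the sketch above, a hedged example of the renamed chan2data()
(the row and channel numbers are illustrative):

    # abscissa and spectral value at row 0, channel 1024
    qx, qy = s.chan2data(rowno=0, chan=1024)
    print "x = %s %s, y = %s %s" % (qx['value'], qx['unit'],
                                    qy['value'], qy['unit'])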


1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log
4from asap import asaplog
5from asap import selector
6from asap import linecatalog
7from asap import _n_bools, mask_not, mask_and, mask_or
8
9class scantable(Scantable):
10 """
11 The ASAP container for scans
12 """
13
14 def __init__(self, filename, average=None, unit=None, getpt=None):
15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
25 scantable
26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
32 """
33 if average is None:
34 average = rcParams['scantable.autoaverage']
35 if getpt is None:
36 getpt = False
37 varlist = vars()
38 from asap._asap import stmath
39 self._math = stmath()
40 if isinstance(filename, Scantable):
41 Scantable.__init__(self, filename)
42 else:
43 if isinstance(filename, str):
44 import os.path
45 filename = os.path.expandvars(filename)
46 filename = os.path.expanduser(filename)
47 if not os.path.exists(filename):
48 s = "File '%s' not found." % (filename)
49 if rcParams['verbose']:
50 asaplog.push(s)
51 print asaplog.pop().strip()
52 return
53 raise IOError(s)
54 if os.path.isdir(filename) \
55 and not os.path.exists(filename+'/table.f1'):
56 # crude check if asap table
57 if os.path.exists(filename+'/table.info'):
58 ondisk = rcParams['scantable.storage'] == 'disk'
59 Scantable.__init__(self, filename, ondisk)
60 if unit is not None:
61 self.set_fluxunit(unit)
62 # do not reset to the default freqframe
63 #self.set_freqframe(rcParams['scantable.freqframe'])
64 else:
65 msg = "The given file '%s'is not a valid " \
66 "asap table." % (filename)
67 if rcParams['verbose']:
68 print msg
69 return
70 else:
71 raise IOError(msg)
72 else:
73 self._fill([filename], unit, average, getpt)
74 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
75 and isinstance(filename[-1], str):
76 self._fill(filename, unit, average, getpt)
77 self._add_history("scantable", varlist)
78 print_log()
79
80 def save(self, name=None, format=None, overwrite=False):
81 """
82 Store the scantable on disk. This can be an asap (aips++) Table,
83 SDFITS or MS2 format.
84 Parameters:
85 name: the name of the output file. For format "ASCII"
86 this is the root file name (data in 'name'.txt
87 and header in 'name'_header.txt)
88 format: an optional file format. Default is ASAP.
89 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
90 'SDFITS' (save as SDFITS file)
91 'ASCII' (saves as ascii text file)
92 'MS2' (saves as an aips++
93 MeasurementSet V2)
94 overwrite: If the file should be overwritten if it exists.
95 The default False is to return with warning
96 without writing the output. USE WITH CARE.
97 Example:
98 scan.save('myscan.asap')
99 scan.save('myscan.sdfits', 'SDFITS')
100 """
101 from os import path
102 if format is None: format = rcParams['scantable.save']
103 suffix = '.'+format.lower()
104 if name is None or name == "":
105 name = 'scantable'+suffix
106 msg = "No filename given. Using default name %s..." % name
107 asaplog.push(msg)
108 name = path.expandvars(name)
109 if path.isfile(name) or path.isdir(name):
110 if not overwrite:
111 msg = "File %s exists." % name
112 if rcParams['verbose']:
113 print msg
114 return
115 else:
116 raise IOError(msg)
117 format2 = format.upper()
118 if format2 == 'ASAP':
119 self._save(name)
120 else:
121 from asap._asap import stwriter as stw
122 writer = stw(format2)
123 writer.write(self, name)
124 print_log()
125 return
126
127 def copy(self):
128 """
129 Return a copy of this scantable.
130 Note:
131 This makes a full (deep) copy. scan2 = scan1 makes a reference.
132 Parameters:
133 none
134 Example:
135 copiedscan = scan.copy()
136 """
137 sd = scantable(Scantable._copy(self))
138 return sd
139
140 def drop_scan(self, scanid=None):
141 """
142 Return a new scantable where the specified scan number(s) has(have)
143 been dropped.
144 Parameters:
145 scanid: a (list of) scan number(s)
146 """
147 from asap import _is_sequence_or_number as _is_valid
148 from asap import _to_list
149 from asap import unique
150 if not _is_valid(scanid):
151 if rcParams['verbose']:
152 print "Please specify a scanno to drop from the scantable"
153 return
154 else:
155 raise RuntimeError("No scan given")
156 try:
157 scanid = _to_list(scanid)
158 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
159 for sid in scanid: allscans.remove(sid)
160 if len(allscans) == 0:
161 raise ValueError("Can't remove all scans")
162 except ValueError:
163 if rcParams['verbose']:
164 print "Couldn't find any match."
165 return
166 else: raise
167 try:
168 bsel = self.get_selection()
169 sel = selector()
170 sel.set_scans(allscans)
171 self.set_selection(bsel+sel)
172 scopy = self._copy()
173 self.set_selection(bsel)
174 return scantable(scopy)
175 except RuntimeError:
176 if rcParams['verbose']:
177 print "Couldn't find any match."
178 else:
179 raise
180
181
182 def get_scan(self, scanid=None):
183 """
184 Return a specific scan (by scanno) or collection of scans (by
185 source name) in a new scantable.
186 Note:
187 See scantable.drop_scan() for the inverse operation.
188 Parameters:
189 scanid: a (list of) scanno or a source name, unix-style
190 patterns are accepted for source name matching, e.g.
191 '*_R' gets all 'ref' scans
192 Example:
193 # get all scans containing the source '323p459'
194 newscan = scan.get_scan('323p459')
195 # get all 'off' scans
196 refscans = scan.get_scan('*_R')
197 # get a subset of scans by scanno (as listed in scan.summary())
198 newscan = scan.get_scan([0, 2, 7, 10])
199 """
200 if scanid is None:
201 if rcParams['verbose']:
202 print "Please specify a scan no or name to " \
203 "retrieve from the scantable"
204 return
205 else:
206 raise RuntimeError("No scan given")
207
208 try:
209 bsel = self.get_selection()
210 sel = selector()
211 if type(scanid) is str:
212 sel.set_name(scanid)
213 self.set_selection(bsel+sel)
214 scopy = self._copy()
215 self.set_selection(bsel)
216 return scantable(scopy)
217 elif type(scanid) is int:
218 sel.set_scans([scanid])
219 self.set_selection(bsel+sel)
220 scopy = self._copy()
221 self.set_selection(bsel)
222 return scantable(scopy)
223 elif type(scanid) is list:
224 sel.set_scans(scanid)
225 self.set_selection(sel)
226 scopy = self._copy()
227 self.set_selection(bsel)
228 return scantable(scopy)
229 else:
230 msg = "Illegal scanid type, use 'int' or 'list' if ints."
231 if rcParams['verbose']:
232 print msg
233 else:
234 raise TypeError(msg)
235 except RuntimeError:
236 if rcParams['verbose']: print "Couldn't find any match."
237 else: raise
238
239 def __str__(self):
240 return Scantable._summary(self, True)
241
242 def summary(self, filename=None):
243 """
244 Print a summary of the contents of this scantable.
245 Parameters:
246 filename: the name of a file to write the output to
247 Default - no file output
248 verbose: print extra info such as the frequency table
249 The default (False) is taken from .asaprc
250 """
251 info = Scantable._summary(self, True)
252 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
253 if filename is not None:
254 if filename == "":
255 filename = 'scantable_summary.txt'
256 from os.path import expandvars, isdir
257 filename = expandvars(filename)
258 if not isdir(filename):
259 data = open(filename, 'w')
260 data.write(info)
261 data.close()
262 else:
263 msg = "Illegal file name '%s'." % (filename)
264 if rcParams['verbose']:
265 print msg
266 else:
267 raise IOError(msg)
268 if rcParams['verbose']:
269 try:
270 from IPython.genutils import page as pager
271 except ImportError:
272 from pydoc import pager
273 pager(info)
274 else:
275 return info
276
277
278 def get_selection(self):
279 """
280 Get the selection object currently set on this scantable.
281 Parameters:
282 none
283 Example:
284 sel = scan.get_selection()
285 sel.set_ifs(0) # select IF 0
286 scan.set_selection(sel) # apply modified selection
287 """
288 return selector(self._getselection())
289
290 def set_selection(self, selection=selector()):
291 """
292 Select a subset of the data. All following operations on this scantable
293 are only applied to this selection.
294 Parameters:
295 selection: a selector object (default unset the selection)
296 Examples:
297 sel = selector() # create a selection object
298 sel.set_scans([0, 3]) # select SCANNO 0 and 3
299 scan.set_selection(sel) # set the selection
300 scan.summary() # will only print summary of scanno 0 and 3
301 scan.set_selection() # unset the selection
302 """
303 self._setselection(selection)
304
305 def get_row(self, row=0, insitu=None):
306 """
307 Select a row in the scantable.
308 Return a scantable with single row.
309 Parameters:
310 row: row no of integration, default is 0.
311 insitu: if False a new scantable is returned.
312 Otherwise, the scaling is done in-situ
313 The default is taken from .asaprc (False)
314 """
315 if insitu is None: insitu = rcParams['insitu']
316 if not insitu:
317 workscan = self.copy()
318 else:
319 workscan = self
320 # Select a row
321 sel=selector()
322 sel.set_scans([workscan.getscan(row)])
323 sel.set_cycles([workscan.getcycle(row)])
324 sel.set_beams([workscan.getbeam(row)])
325 sel.set_ifs([workscan.getif(row)])
326 sel.set_polarisations([workscan.getpol(row)])
327 sel.set_name(workscan._getsourcename(row))
328 workscan.set_selection(sel)
329 if not workscan.nrow() == 1:
330 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
331 raise RuntimeError(msg)
332 del sel
333 if insitu:
334 self._assign(workscan)
335 else:
336 return workscan
337
338 def stats(self, stat='stddev', mask=None):
339 """
340 Determine the specified statistic of the current beam/if/pol
341 Takes a 'mask' as an optional parameter to specify which
342 channels should be excluded.
343 You can get min/max values with their
344 channels/frequencies/velocities by selecting stat='min_abc'
345 or 'max_abc'.
346 Parameters:
347 stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
348 'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
349 mask: an optional mask specifying where the statistic
350 should be determined.
351 Example:
352 scan.set_unit('channel')
353 msk = scan.create_mask([100, 200], [500, 600])
354 scan.stats(stat='mean', mask=msk)
355 """
356 if mask is None:
357 mask = []
358 axes = ['Beam', 'IF', 'Pol', 'Time']
359 if not self._check_ifs():
360 raise ValueError("Cannot apply mask as the IFs have different "
361 "number of channels. Please use setselection() "
362 "to select individual IFs")
363
364 if stat.lower().find('_abc') == -1:
365 statvals = self._math._stats(self, mask, stat)
366 getchan = False
367 sstat = str(stat)
368 else:
369 chan = self._math._minmaxchan(self, mask, stat)
370 getchan = True
371 statvals = []
372 sstat = stat.lower().strip('_abc')
373 out = ''
374 axes = []
375 for i in range(self.nrow()):
376 axis = []
377 axis.append(self.getscan(i))
378 axis.append(self.getbeam(i))
379 axis.append(self.getif(i))
380 axis.append(self.getpol(i))
381 axis.append(self.getcycle(i))
382 axes.append(axis)
383 tm = self._gettime(i)
384 src = self._getsourcename(i)
385 xpos = ''
386 if getchan:
387 qx, qy = self.chan2data(rowno=i, chan=chan[i])
388 statvals.append(qy['value'])
389 xpos = '(x = %3.3f' % (qx['value'])+' ['+qx['unit']+'])'
390 out += 'Scan[%d] (%s) ' % (axis[0], src)
391 out += 'Time[%s]:\n' % (tm)
392 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
393 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
394 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
395 out += '= %3.3f ' % (statvals[i]) +xpos+'\n'
396 out += "--------------------------------------------------\n"
397
398 if rcParams['verbose']:
399 print "--------------------------------------------------"
400 print " ", sstat
401 print "--------------------------------------------------"
402 print out
403 #else:
404 #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
405 # 'axes' : axes,
406 # 'data': statvals}
407 return statvals
408
409 def chan2data(self, rowno=0, chan=0):
410 """
411 Returns channel/frequency/velocity and spectral value
412 at an arbitrary row and channel in the scantable.
413 Parameters:
414 rowno: a row number in the scantable. Default is the
415 first row, i.e. rowno=0
416 chan: a channel in the scantable. Default is the first
417 channel, i.e. chan=0
418 """
419 if isinstance(rowno, int) and isinstance(chan, int):
420 x, xlbl = self.get_abcissa(rowno)
421 qx = {'unit': xlbl, 'value': x[chan]}
422 qy = {'unit': self.get_fluxunit(),
423 'value': self._getspectrum(rowno)[chan]}
424 return qx, qy
425
426 def stddev(self, mask=None):
427 """
428 Determine the standard deviation of the current beam/if/pol
429 Takes a 'mask' as an optional parameter to specify which
430 channels should be excluded.
431 Parameters:
432 mask: an optional mask specifying where the standard
433 deviation should be determined.
434
435 Example:
436 scan.set_unit('channel')
437 msk = scan.create_mask([100, 200], [500, 600])
438 scan.stddev(mask=msk)
439 """
440 return self.stats(stat='stddev', mask=mask)
441
442
443 def get_column_names(self):
444 """
445 Return a list of column names, which can be used for selection.
446 """
447 return list(Scantable.get_column_names(self))
448
449 def get_tsys(self):
450 """
451 Return the System temperatures.
452 Returns:
453 a list of Tsys values for the current selection
454 """
455
456 return self._row_callback(self._gettsys, "Tsys")
457
458 def _row_callback(self, callback, label):
459 axes = []
460 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
461 out = ""
462 outvec = []
463 for i in range(self.nrow()):
464 axis = []
465 axis.append(self.getscan(i))
466 axis.append(self.getbeam(i))
467 axis.append(self.getif(i))
468 axis.append(self.getpol(i))
469 axis.append(self.getcycle(i))
470 axes.append(axis)
471 tm = self._gettime(i)
472 src = self._getsourcename(i)
473 out += 'Scan[%d] (%s) ' % (axis[0], src)
474 out += 'Time[%s]:\n' % (tm)
475 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
476 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
477 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
478 outvec.append(callback(i))
479 out += '= %3.3f\n' % (outvec[i])
480 out += "--------------------------------------------------\n"
481 if rcParams['verbose']:
482 print "--------------------------------------------------"
483 print " %s" % (label)
484 print "--------------------------------------------------"
485 print out
486 # disabled because the vector seems more useful
487 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
488 return outvec
489
490 def _get_column(self, callback, row=-1):
491 """
492 """
493 if row == -1:
494 return [callback(i) for i in range(self.nrow())]
495 else:
496 if 0 <= row < self.nrow():
497 return callback(row)
498
499
500 def get_time(self, row=-1, asdatetime=False):
501 """
502 Get a list of time stamps for the observations.
503 Return a datetime object for each integration time stamp in the scantable.
504 Parameters:
505 row: row no of integration. Default -1 return all rows
506 asdatetime: return values as datetime objects rather than strings
507 Example:
508 none
509 """
510 from time import strptime
511 from datetime import datetime
512 times = self._get_column(self._gettime, row)
513 if not asdatetime:
514 return times
515 format = "%Y/%m/%d/%H:%M:%S"
516 if isinstance(times, list):
517 return [datetime(*strptime(i, format)[:6]) for i in times]
518 else:
519 return datetime(*strptime(times, format)[:6])
520
521
522 def get_inttime(self, row=-1):
523 """
524 Get a list of integration times for the observations.
525 Return a time in seconds for each integration in the scantable.
526 Parameters:
527 row: row no of integration. Default -1 return all rows.
528 Example:
529 none
530 """
531 return self._get_column(self._getinttime, row)
532
533
534 def get_sourcename(self, row=-1):
535 """
536 Get a list of source names for the observations.
537 Return a string for each integration in the scantable.
538 Parameters:
539 row: row no of integration. Default -1 return all rows.
540 Example:
541 none
542 """
543 return self._get_column(self._getsourcename, row)
544
545 def get_elevation(self, row=-1):
546 """
547 Get a list of elevations for the observations.
548 Return a float for each integration in the scantable.
549 Parameters:
550 row: row no of integration. Default -1 return all rows.
551 Example:
552 none
553 """
554 return self._get_column(self._getelevation, row)
555
556 def get_azimuth(self, row=-1):
557 """
558 Get a list of azimuths for the observations.
559 Return a float for each integration in the scantable.
560 Parameters:
561 row: row no of integration. Default -1 return all rows.
562 Example:
563 none
564 """
565 return self._get_column(self._getazimuth, row)
566
567 def get_parangle(self, row=-1):
568 """
569 Get a list of parallactic angles for the observations.
570 Return a float for each integration in the scantable.
571 Parameters:
572 row: row no of integration. Default -1 return all rows.
573 Example:
574 none
575 """
576 return self._get_column(self._getparangle, row)
577
578 def get_direction(self, row=-1):
579 """
580 Get a list of Positions on the sky (direction) for the observations.
581 Return a float for each integration in the scantable.
582 Parameters:
583 row: row no of integration. Default -1 return all rows
584 Example:
585 none
586 """
587 return self._get_column(self._getdirection, row)
588
589 def get_directionval(self, row=-1):
590 """
591 Get a list of Positions on the sky (direction) for the observations.
592 Return a float for each integration in the scantable.
593 Parameters:
594 row: row no of integration. Default -1 return all rows
595 Example:
596 none
597 """
598 return self._get_column(self._getdirectionvec, row)
599
600 def set_unit(self, unit='channel'):
601 """
602 Set the unit for all following operations on this scantable
603 Parameters:
604 unit: optional unit, default is 'channel'
605 one of '*Hz', 'km/s', 'channel', ''
606 """
607 varlist = vars()
608 if unit in ['', 'pixel', 'channel']:
609 unit = ''
610 inf = list(self._getcoordinfo())
611 inf[0] = unit
612 self._setcoordinfo(inf)
613 self._add_history("set_unit", varlist)
614
615 def set_instrument(self, instr):
616 """
617 Set the instrument for subsequent processing.
618 Parameters:
619 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
620 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
621 """
622 self._setInstrument(instr)
623 self._add_history("set_instrument", vars())
624 print_log()
625
626 def set_feedtype(self, feedtype):
627 """
628 Overwrite the feed type, which might not be set correctly.
629 Parameters:
630 feedtype: 'linear' or 'circular'
631 """
632 self._setfeedtype(feedtype)
633 self._add_history("set_feedtype", vars())
634 print_log()
635
636 def set_doppler(self, doppler='RADIO'):
637 """
638 Set the doppler for all following operations on this scantable.
639 Parameters:
640 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
641 """
642 varlist = vars()
643 inf = list(self._getcoordinfo())
644 inf[2] = doppler
645 self._setcoordinfo(inf)
646 self._add_history("set_doppler", vars())
647 print_log()
648
649 def set_freqframe(self, frame=None):
650 """
651 Set the frame type of the Spectral Axis.
652 Parameters:
653 frame: an optional frame type, default 'LSRK'. Valid frames are:
654 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
655 'GEO', 'GALACTO', 'LGROUP', 'CMB'
656 Examples:
657 scan.set_freqframe('BARY')
658 """
659 if frame is None: frame = rcParams['scantable.freqframe']
660 varlist = vars()
661 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
662 'GEO', 'GALACTO', 'LGROUP', 'CMB']
663
664 if frame in valid:
665 inf = list(self._getcoordinfo())
666 inf[1] = frame
667 self._setcoordinfo(inf)
668 self._add_history("set_freqframe", varlist)
669 else:
670 msg = "Please specify a valid freq type. Valid types are:\n", valid
671 if rcParams['verbose']:
672 print msg
673 else:
674 raise TypeError(msg)
675 print_log()
676
677 def set_dirframe(self, frame=""):
678 """
679 Set the frame type of the Direction on the sky.
680 Parameters:
681 frame: an optional frame type, default ''. Valid frames are:
682 'J2000', 'B1950', 'GALACTIC'
683 Examples:
684 scan.set_dirframe('GALACTIC')
685 """
686 varlist = vars()
687 try:
688 Scantable.set_dirframe(self, frame)
689 except RuntimeError, msg:
690 if rcParams['verbose']:
691 print msg
692 else:
693 raise
694 self._add_history("set_dirframe", varlist)
695
696 def get_unit(self):
697 """
698 Get the default unit set in this scantable
699 Returns:
700 A unit string
701 """
702 inf = self._getcoordinfo()
703 unit = inf[0]
704 if unit == '': unit = 'channel'
705 return unit
706
707 def get_abcissa(self, rowno=0):
708 """
709 Get the abcissa in the current coordinate setup for the currently
710 selected Beam/IF/Pol
711 Parameters:
712 rowno: an optional row number in the scantable. Default is the
713 first row, i.e. rowno=0
714 Returns:
715 The abcissa values and the format string (as a dictionary)
716 """
717 abc = self._getabcissa(rowno)
718 lbl = self._getabcissalabel(rowno)
719 print_log()
720 return abc, lbl
721
722 def flag(self, mask=None, unflag=False):
723 """
724 Flag the selected data using an optional channel mask.
725 Parameters:
726 mask: an optional channel mask, created with create_mask. Default
727 (no mask) is all channels.
728 unflag: if True, unflag the data
729 """
730 varlist = vars()
731 if mask is None:
732 mask = []
733 try:
734 self._flag(mask, unflag)
735 except RuntimeError, msg:
736 if rcParams['verbose']:
737 print msg
738 return
739 else: raise
740 self._add_history("flag", varlist)
741
742 def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
743 """
744 Flag the data in 'lag' space by providing a frequency to remove.
745 Flagged data in the scantable gets set to 0.0 before the fft.
746 No taper is applied.
747 Parameters:
748 frequency: the frequency (really a period within the bandwidth)
749 to remove
750 width: the width of the frequency to remove, to remove a
751 range of frequencies around the centre.
752 unit: the frequency unit (default "GHz")
753 Notes:
754 It is recommended to flag edges of the band or strong
755 signals beforehand.
756 """
757 if insitu is None: insitu = rcParams['insitu']
758 self._math._setinsitu(insitu)
759 varlist = vars()
760 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
761 if not base.has_key(unit):
762 raise ValueError("%s is not a valid unit." % unit)
763 try:
764 s = scantable(self._math._lag_flag(self, frequency*base[unit],
765 width*base[unit]))
766 except RuntimeError, msg:
767 if rcParams['verbose']:
768 print msg
769 return
770 else: raise
771 s._add_history("lag_flag", varlist)
772 print_log()
773 if insitu:
774 self._assign(s)
775 else:
776 return s
777
778
779 def create_mask(self, *args, **kwargs):
780 """
781 Compute and return a mask based on [min, max] windows.
782 The specified windows are to be INCLUDED, when the mask is
783 applied.
784 Parameters:
785 [min, max], [min2, max2], ...
786 Pairs of start/end points (inclusive) specifying the regions
787 to be masked
788 invert: optional argument. If specified as True,
789 return an inverted mask, i.e. the regions
790 specified are EXCLUDED
791 row: create the mask using the specified row for
792 unit conversions, default is row=0
793 only necessary if frequency varies over rows.
794 Example:
795 scan.set_unit('channel')
796 a)
797 msk = scan.create_mask([400, 500], [800, 900])
798 # masks everything outside 400 and 500
799 # and 800 and 900 in the unit 'channel'
800
801 b)
802 msk = scan.create_mask([400, 500], [800, 900], invert=True)
803 # masks the regions between 400 and 500
804 # and 800 and 900 in the unit 'channel'
805 c)
806 mask only channel 400
807 msk = scan.create_mask([400, 400])
808 """
809 row = 0
810 if kwargs.has_key("row"):
811 row = kwargs.get("row")
812 data = self._getabcissa(row)
813 u = self._getcoordinfo()[0]
814 if rcParams['verbose']:
815 if u == "": u = "channel"
816 msg = "The current mask window unit is %s" % u
817 i = self._check_ifs()
818 if not i:
819 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
820 asaplog.push(msg)
821 n = self.nchan()
822 msk = _n_bools(n, False)
823 # test if args is a 'list' or a 'normal *args - UGLY!!!
824
825 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
826 and args or args[0]
827 for window in ws:
828 if (len(window) != 2 or window[0] > window[1] ):
829 raise TypeError("A window needs to be defined as [min, max]")
830 for i in range(n):
831 if data[i] >= window[0] and data[i] <= window[1]:
832 msk[i] = True
833 if kwargs.has_key('invert'):
834 if kwargs.get('invert'):
835 msk = mask_not(msk)
836 print_log()
837 return msk
838
839 def get_masklist(self, mask=None, row=0):
840 """
841 Compute and return a list of mask windows, [min, max].
842 Parameters:
843 mask: channel mask, created with create_mask.
844 row: calculate the masklist using the specified row
845 for unit conversions, default is row=0
846 only necessary if frequency varies over rows.
847 Returns:
848 [min, max], [min2, max2], ...
849 Pairs of start/end points (inclusive) specifying
850 the masked regions
851 """
852 if not (isinstance(mask,list) or isinstance(mask, tuple)):
853 raise TypeError("The mask should be list or tuple.")
854 if len(mask) < 2:
855 raise TypeError("The mask should contain more than 1 element.")
856 if self.nchan() != len(mask):
857 msg = "Number of channels in scantable != number of mask elements"
858 raise TypeError(msg)
859 data = self._getabcissa(row)
860 u = self._getcoordinfo()[0]
861 if rcParams['verbose']:
862 if u == "": u = "channel"
863 msg = "The current mask window unit is %s" % u
864 i = self._check_ifs()
865 if not i:
866 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
867 asaplog.push(msg)
868 masklist=[]
869 ist, ien = None, None
870 ist, ien=self.get_mask_indices(mask)
871 if ist is not None and ien is not None:
872 for i in xrange(len(ist)):
873 limits = [data[ist[i]], data[ien[i]]]
874 limits.sort()
875 masklist.append([limits[0], limits[1]])
876 return masklist
877
878 def get_mask_indices(self, mask=None):
879 """
880 Compute and Return lists of mask start indices and mask end indices.
881 Parameters:
882 mask: channel mask, created with create_mask.
883 Returns:
884 List of mask start indices and that of mask end indices,
885 i.e., [istart1,istart2,....], [iend1,iend2,....].
886 """
887 if not (isinstance(mask,list) or isinstance(mask, tuple)):
888 raise TypeError("The mask should be list or tuple.")
889 if len(mask) < 2:
890 raise TypeError("The mask should contain more than 1 element.")
891 istart=[]
892 iend=[]
893 if mask[0]: istart.append(0)
894 for i in range(len(mask)-1):
895 if not mask[i] and mask[i+1]:
896 istart.append(i+1)
897 elif mask[i] and not mask[i+1]:
898 iend.append(i)
899 if mask[len(mask)-1]: iend.append(len(mask)-1)
900 if len(istart) != len(iend):
901 raise RuntimeError("Numbers of mask start != mask end.")
902 for i in range(len(istart)):
903 if istart[i] > iend[i]:
904 raise RuntimeError("Mask start index > mask end index")
905 break
906 return istart,iend
907
908# def get_restfreqs(self):
909# """
910# Get the restfrequency(s) stored in this scantable.
911# The return value(s) are always of unit 'Hz'
912# Parameters:
913# none
914# Returns:
915# a list of doubles
916# """
917# return list(self._getrestfreqs())
918
919 def get_restfreqs(self, ids=None):
920 """
921 Get the restfrequency(s) stored in this scantable.
922 The return value(s) are always of unit 'Hz'
923 Parameters:
924 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
925 be retrieved
926 Returns:
927 dictionary containing ids and a list of doubles for each id
928 """
929 if ids is None:
930 rfreqs={}
931 idlist = self.getmolnos()
932 for i in idlist:
933 rfreqs[i]=list(self._getrestfreqs(i))
934 return rfreqs
935 else:
936 if type(ids)==list or type(ids)==tuple:
937 rfreqs={}
938 for i in ids:
939 rfreqs[i]=list(self._getrestfreqs(i))
940 return rfreqs
941 else:
942 return list(self._getrestfreqs(ids))
943 #return list(self._getrestfreqs(ids))
944
945 def set_restfreqs(self, freqs=None, unit='Hz'):
946 """
947 ********NEED TO BE UPDATED begin************
948 Set or replace the restfrequency.
949 If the 'freqs' argument holds a scalar,
950 then that rest frequency will be applied to all the selected
951 data. If the 'freqs' argument holds
952 a vector, then it MUST be of equal or smaller length than
953 the number of IFs (and the available restfrequencies will be
954 replaced by this vector). In this case, *all* data have
955 the restfrequency set per IF according
956 to the corresponding value you give in the 'freqs' vector.
957 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
958 IF 1 gets restfreq 2e9.
959 ********NEED TO BE UPDATED end************
961 You can also specify the frequencies via a linecatalog.
961
962 Parameters:
963 freqs: list of rest frequency values or string identifiers
964 unit: unit for rest frequency (default 'Hz')
965
966 Example:
967 # set the given restfrequency for the all currently selected IFs
968 scan.set_restfreqs(freqs=1.4e9)
969 # set multiple restfrequencies to all the selected data
970 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
971 # If the number of IFs in the data is >= 2 the IF0 gets the first
972 # value, IF1 the second, etc. NOTE that freqs needs to be
973 # specified as a list of lists (e.g. [[],[],...]).
974 scan.set_restfreqs(freqs=[[1.4e9],[1.67e9]])
975 #set the given restfrequency for the whole table (by name)
976 scan.set_restfreqs(freqs="OH1667")
977
978 Note:
979 To do more sophisticated rest-frequency setting, e.g. on a
980 source and IF basis, use scantable.set_selection() before using
981 this function.
982 # provided your scantable is called scan
983 selection = selector()
984 selection.set_name("ORION*")
985 selection.set_ifs([1])
986 scan.set_selection(selection)
987 scan.set_restfreqs(freqs=86.6e9)
988
989 """
990 varlist = vars()
991 from asap import linecatalog
992 # simple value
993 if isinstance(freqs, int) or isinstance(freqs, float):
994 # TT mod
995 #self._setrestfreqs(freqs, "",unit)
996 self._setrestfreqs([freqs], [""],unit)
997 # list of values
998 elif isinstance(freqs, list) or isinstance(freqs, tuple):
999 # list values are scalars
1000 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1001 self._setrestfreqs(freqs, [""],unit)
1002 # list values are tuples, (value, name)
1003 elif isinstance(freqs[-1], dict):
1004 #sel = selector()
1005 #savesel = self._getselection()
1006 #iflist = self.getifnos()
1007 #for i in xrange(len(freqs)):
1008 # sel.set_ifs(iflist[i])
1009 # self._setselection(sel)
1010 # self._setrestfreqs(freqs[i], "",unit)
1011 #self._setselection(savesel)
1012 self._setrestfreqs(freqs["value"],
1013 freqs["name"], "MHz")
1014 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1015 sel = selector()
1016 savesel = self._getselection()
1017 iflist = self.getifnos()
1018 if len(freqs)>len(iflist):
1019 raise ValueError("number of elements in list of list exeeds the current IF selections")
1020 for i in xrange(len(freqs)):
1021 sel.set_ifs(iflist[i])
1022 self._setselection(sel)
1023 self._setrestfreqs(freqs[i]["value"],
1024 freqs[i]["name"], "MHz")
1025 self._setselection(savesel)
1026 # freqs are to be taken from a linecatalog
1027 elif isinstance(freqs, linecatalog):
1028 sel = selector()
1029 savesel = self._getselection()
1030 for i in xrange(freqs.nrow()):
1031 sel.set_ifs(self.getifnos()[i])
1032 self._setselection(sel)
1033 self._setrestfreqs(freqs.get_frequency(i),
1034 freqs.get_name(i), "MHz")
1035 # ensure that we are not iterating past nIF
1036 if i == self.nif()-1: break
1037 self._setselection(savesel)
1038 else:
1039 return
1040 self._add_history("set_restfreqs", varlist)
1041
1042 def shift_refpix(self, delta):
1043 """
1044 Shift the reference pixel of the Spectra Coordinate by an
1045 integer amount.
1046 Parameters:
1047 delta: the amount to shift by
1048 Note:
1049 Be careful using this with broadband data.
1050 """
1051 Scantable.shift(self, delta)
1052
1053 def history(self, filename=None):
1054 """
1055 Print the history. Optionally to a file.
1056 Parameters:
1057 filename: The name of the file to save the history to.
1058 """
1059 hist = list(self._gethistory())
1060 out = "-"*80
1061 for h in hist:
1062 if h.startswith("---"):
1063 out += "\n"+h
1064 else:
1065 items = h.split("##")
1066 date = items[0]
1067 func = items[1]
1068 items = items[2:]
1069 out += "\n"+date+"\n"
1070 out += "Function: %s\n Parameters:" % (func)
1071 for i in items:
1072 s = i.split("=")
1073 out += "\n %s = %s" % (s[0], s[1])
1074 out += "\n"+"-"*80
1075 if filename is not None:
1076 if filename == "":
1077 filename = 'scantable_history.txt'
1078 import os
1079 filename = os.path.expandvars(os.path.expanduser(filename))
1080 if not os.path.isdir(filename):
1081 data = open(filename, 'w')
1082 data.write(out)
1083 data.close()
1084 else:
1085 msg = "Illegal file name '%s'." % (filename)
1086 if rcParams['verbose']:
1087 print msg
1088 else:
1089 raise IOError(msg)
1090 if rcParams['verbose']:
1091 try:
1092 from IPython.genutils import page as pager
1093 except ImportError:
1094 from pydoc import pager
1095 pager(out)
1096 else:
1097 return out
1098 return
1099 #
1100 # Maths business
1101 #
1102
1103 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1104 """
1105 Return the (time) weighted average of a scan.
1106 Note:
1107 in channels only - align if necessary
1108 Parameters:
1109 mask: an optional mask (only used for 'var' and 'tsys'
1110 weighting)
1111 scanav: True averages each scan separately
1112 False (default) averages all scans together,
1113 weight: Weighting scheme.
1114 'none' (mean no weight)
1115 'var' (1/var(spec) weighted)
1116 'tsys' (1/Tsys**2 weighted)
1117 'tint' (integration time weighted)
1118 'tintsys' (Tint/Tsys**2)
1119 'median' ( median averaging)
1120 The default is 'tint'
1121 align: align the spectra in velocity before averaging. It takes
1122 the time of the first spectrum as reference time.
1123 Example:
1124 # time average the scantable without using a mask
1125 newscan = scan.average_time()
1126 """
1127 varlist = vars()
1128 if weight is None: weight = 'TINT'
1129 if mask is None: mask = ()
1130 if scanav: scanav = "SCAN"
1131 else: scanav = "NONE"
1132 scan = (self, )
1133 try:
1134 if align:
1135 scan = (self.freq_align(insitu=False), )
1136 s = None
1137 if weight.upper() == 'MEDIAN':
1138 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1139 scanav))
1140 else:
1141 s = scantable(self._math._average(scan, mask, weight.upper(),
1142 scanav))
1143 except RuntimeError, msg:
1144 if rcParams['verbose']:
1145 print msg
1146 return
1147 else: raise
1148 s._add_history("average_time", varlist)
1149 print_log()
1150 return s
1151
1152 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1153 """
1154 Return a scan where all spectra are converted to either
1155 Jansky or Kelvin depending upon the flux units of the scan table.
1156 By default the function tries to look the values up internally.
1157 If it can't find them (or if you want to over-ride), you must
1158 specify EITHER jyperk OR eta (and D which it will try to look up
1159 also if you don't set it). jyperk takes precedence if you set both.
1160 Parameters:
1161 jyperk: the Jy / K conversion factor
1162 eta: the aperture efficiency
1163 d: the geometric diameter (metres)
1164 insitu: if False a new scantable is returned.
1165 Otherwise, the scaling is done in-situ
1166 The default is taken from .asaprc (False)
1167 """
1168 if insitu is None: insitu = rcParams['insitu']
1169 self._math._setinsitu(insitu)
1170 varlist = vars()
1171 if jyperk is None: jyperk = -1.0
1172 if d is None: d = -1.0
1173 if eta is None: eta = -1.0
1174 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1175 s._add_history("convert_flux", varlist)
1176 print_log()
1177 if insitu: self._assign(s)
1178 else: return s
1179
1180 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1181 """
1182 Return a scan after applying a gain-elevation correction.
1183 The correction can be made via either a polynomial or a
1184 table-based interpolation (and extrapolation if necessary).
1185 You specify polynomial coefficients, an ascii table or neither.
1186 If you specify neither, then a polynomial correction will be made
1187 with built in coefficients known for certain telescopes (an error
1188 will occur if the instrument is not known).
1189 The data and Tsys are *divided* by the scaling factors.
1190 Parameters:
1191 poly: Polynomial coefficients (default None) to compute a
1192 gain-elevation correction as a function of
1193 elevation (in degrees).
1194 filename: The name of an ascii file holding correction factors.
1195 The first row of the ascii file must give the column
1196 names and these MUST include columns
1197 "ELEVATION" (degrees) and "FACTOR" (multiply data
1198 by this) somewhere.
1199 The second row must give the data type of the
1200 column. Use 'R' for Real and 'I' for Integer.
1201 An example file would be
1202 (actual factors are arbitrary) :
1203
1204 TIME ELEVATION FACTOR
1205 R R R
1206 0.1 0 0.8
1207 0.2 20 0.85
1208 0.3 40 0.9
1209 0.4 60 0.85
1210 0.5 80 0.8
1211 0.6 90 0.75
1212 method: Interpolation method when correcting from a table.
1213 Values are "nearest", "linear" (default), "cubic"
1214 and "spline"
1215 insitu: if False a new scantable is returned.
1216 Otherwise, the scaling is done in-situ
1217 The default is taken from .asaprc (False)
1218 """
1219
1220 if insitu is None: insitu = rcParams['insitu']
1221 self._math._setinsitu(insitu)
1222 varlist = vars()
1223 if poly is None:
1224 poly = ()
1225 from os.path import expandvars
1226 filename = expandvars(filename)
1227 s = scantable(self._math._gainel(self, poly, filename, method))
1228 s._add_history("gain_el", varlist)
1229 print_log()
1230 if insitu: self._assign(s)
1231 else: return s
1232
1233 def freq_align(self, reftime=None, method='cubic', insitu=None):
1234 """
1235 Return a scan where all rows have been aligned in frequency/velocity.
1236 The alignment frequency frame (e.g. LSRK) is that set by function
1237 set_freqframe.
1238 Parameters:
1239 reftime: reference time to align at. By default, the time of
1240 the first row of data is used.
1241 method: Interpolation method for regridding the spectra.
1242 Choose from "nearest", "linear", "cubic" (default)
1243 and "spline"
1244 insitu: if False a new scantable is returned.
1245 Otherwise, the scaling is done in-situ
1246 The default is taken from .asaprc (False)
1247 """
1248 if insitu is None: insitu = rcParams["insitu"]
1249 self._math._setinsitu(insitu)
1250 varlist = vars()
1251 if reftime is None: reftime = ""
1252 s = scantable(self._math._freq_align(self, reftime, method))
1253 s._add_history("freq_align", varlist)
1254 print_log()
1255 if insitu: self._assign(s)
1256 else: return s
1257
1258 def opacity(self, tau, insitu=None):
1259 """
1260 Apply an opacity correction. The data
1261 and Tsys are multiplied by the correction factor.
1262 Parameters:
1263 tau: Opacity from which the correction factor is
1264 exp(tau*ZD)
1265 where ZD is the zenith-distance
1266 insitu: if False a new scantable is returned.
1267 Otherwise, the scaling is done in-situ
1268 The default is taken from .asaprc (False)
1269 """
1270 if insitu is None: insitu = rcParams['insitu']
1271 self._math._setinsitu(insitu)
1272 varlist = vars()
1273 s = scantable(self._math._opacity(self, tau))
1274 s._add_history("opacity", varlist)
1275 print_log()
1276 if insitu: self._assign(s)
1277 else: return s
1278
1279 def bin(self, width=5, insitu=None):
1280 """
1281 Return a scan where all spectra have been binned up.
1282 Parameters:
1283 width: The bin width (default=5) in pixels
1284 insitu: if False a new scantable is returned.
1285 Otherwise, the scaling is done in-situ
1286 The default is taken from .asaprc (False)
1287 """
1288 if insitu is None: insitu = rcParams['insitu']
1289 self._math._setinsitu(insitu)
1290 varlist = vars()
1291 s = scantable(self._math._bin(self, width))
1292 s._add_history("bin", varlist)
1293 print_log()
1294 if insitu: self._assign(s)
1295 else: return s
1296
1297
1298 def resample(self, width=5, method='cubic', insitu=None):
1299 """
1300 Return a scan where all spectra have been resampled onto a new channel grid.
1301
1302 Parameters:
1303 width: The bin width (default=5) in pixels
1304 method: Interpolation method for the resampling.
1305 Values are "nearest", "linear", "cubic" (default)
1306 and "spline"
1307 insitu: if False a new scantable is returned.
1308 Otherwise, the scaling is done in-situ
1309 The default is taken from .asaprc (False)
1310 """
1311 if insitu is None: insitu = rcParams['insitu']
1312 self._math._setinsitu(insitu)
1313 varlist = vars()
1314 s = scantable(self._math._resample(self, method, width))
1315 s._add_history("resample", varlist)
1316 print_log()
1317 if insitu: self._assign(s)
1318 else: return s
1319
1320
1321 def average_pol(self, mask=None, weight='none'):
1322 """
1323 Average the Polarisations together.
1324 Parameters:
1325 mask: An optional mask defining the region, where the
1326 averaging will be applied. The output will have all
1327 specified points masked.
1328 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1329 weighted), or 'tsys' (1/Tsys**2 weighted)
1330 """
1331 varlist = vars()
1332 if mask is None:
1333 mask = ()
1334 s = scantable(self._math._averagepol(self, mask, weight.upper()))
1335 s._add_history("average_pol", varlist)
1336 print_log()
1337 return s
1338
1339 def average_beam(self, mask=None, weight='none'):
1340 """
1341 Average the Beams together.
1342 Parameters:
1343 mask: An optional mask defining the region, where the
1344 averaging will be applied. The output will have all
1345 specified points masked.
1346 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1347 weighted), or 'tsys' (1/Tsys**2 weighted)
1348 """
1349 varlist = vars()
1350 if mask is None:
1351 mask = ()
1352 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1353 s._add_history("average_beam", varlist)
1354 print_log()
1355 return s
1356
1357 def convert_pol(self, poltype=None):
1358 """
1359 Convert the data to a different polarisation type.
1360 Parameters:
1361 poltype: The new polarisation type. Valid types are:
1362 "linear", "stokes" and "circular"
1363 """
1364 varlist = vars()
1365 try:
1366 s = scantable(self._math._convertpol(self, poltype))
1367 except RuntimeError, msg:
1368 if rcParams['verbose']:
1369 print msg
1370 return
1371 else:
1372 raise
1373 s._add_history("convert_pol", varlist)
1374 print_log()
1375 return s
1376
1377 def smooth(self, kernel="hanning", width=5.0, insitu=None):
1378 """
1379 Smooth the spectrum by the specified kernel (conserving flux).
1380 Parameters:
1381 kernel: The type of smoothing kernel. Select from
1382 'hanning' (default), 'gaussian', 'boxcar' and
1383 'rmedian'
1384 width: The width of the kernel in pixels. For hanning this is
1385 ignored, otherwise it defaults to 5 pixels.
1386 For 'gaussian' it is the Full Width Half
1387 Maximum. For 'boxcar' it is the full width.
1388 For 'rmedian' it is the half width.
1389 insitu: if False a new scantable is returned.
1390 Otherwise, the scaling is done in-situ
1391 The default is taken from .asaprc (False)
1392 Example:
1393 none
1394 """
1395 if insitu is None: insitu = rcParams['insitu']
1396 self._math._setinsitu(insitu)
1397 varlist = vars()
1398 s = scantable(self._math._smooth(self, kernel.lower(), width))
1399 s._add_history("smooth", varlist)
1400 print_log()
1401 if insitu: self._assign(s)
1402 else: return s
1403
1404
1405 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
1406 """
1407 Return a scan which has been baselined (all rows) by a polynomial.
1408 Parameters:
1409 mask: an optional mask
1410 order: the order of the polynomial (default is 0)
1411 plot: plot the fit and the residual. In this case each
1412 individual fit has to be approved by typing 'y'
1413 or 'n'
1414 uselin: use linear polynomial fit
1415 insitu: if False a new scantable is returned.
1416 Otherwise, the scaling is done in-situ
1417 The default is taken from .asaprc (False)
1418 Example:
1419 # return a scan baselined by a third order polynomial,
1420 # not using a mask
1421 bscan = scan.poly_baseline(order=3)
1422 """
1423 if insitu is None: insitu = rcParams['insitu']
1424 varlist = vars()
1425 if mask is None:
1426 mask = [True for i in xrange(self.nchan(-1))]
1427 from asap.asapfitter import fitter
1428 try:
1429 f = fitter()
1430 f.set_scan(self, mask)
1431 if uselin:
1432 f.set_function(lpoly=order)
1433 else:
1434 f.set_function(poly=order)
1435 s = f.auto_fit(insitu, plot=plot)
1436 # Save parameters of baseline fits as a class attribute.
1437 # NOTICE: It does not reflect changes in scantable!
1438 self.blpars = f.blpars
1439 s._add_history("poly_baseline", varlist)
1440 print_log()
1441 if insitu: self._assign(s)
1442 else: return s
1443 except RuntimeError:
1444 msg = "The fit failed, possibly because it didn't converge."
1445 if rcParams['verbose']:
1446 print msg
1447 return
1448 else:
1449 raise RuntimeError(msg)
1450
1451
1452 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1453 threshold=3, chan_avg_limit=1, plot=False,
1454 insitu=None):
1455 """
1456 Return a scan which has been baselined (all rows) by a polynomial.
1457 Spectral lines are detected first using linefinder and masked out
1458 to avoid them affecting the baseline solution.
1459
1460 Parameters:
1461 mask: an optional mask retrieved from scantable
1462 edge: an optional number of channels to drop at
1463 the edge of spectrum. If only one value is
1464 specified, the same number will be dropped from
1465 both sides of the spectrum. Default is to keep
1466 all channels. Nested tuples represent individual
1467 edge selection for different IFs (a number of spectral
1468 channels can be different)
1469 order: the order of the polynomial (default is 0)
1470 threshold: the threshold used by line finder. It is better to
1471 keep it large as only strong lines affect the
1472 baseline solution.
1473 chan_avg_limit:
1474 a maximum number of consecutive spectral channels to
1475 average during the search of weak and broad lines.
1476 The default is no averaging (and no search for weak
1477 lines). If such lines can affect the fitted baseline
1478 (e.g. a high order polynomial is fitted), increase this
1479 parameter (usually values up to 8 are reasonable). Most
1480 users of this method should find the default value
1481 sufficient.
1482 plot: plot the fit and the residual. In this case each
1483 individual fit has to be approved by typing 'y'
1484 or 'n'
1485 insitu: if False a new scantable is returned.
1486 Otherwise, the scaling is done in-situ
1487 The default is taken from .asaprc (False)
1488
1489 Example:
1490 scan2=scan.auto_poly_baseline(order=7)
1491 """
1492 if insitu is None: insitu = rcParams['insitu']
1493 varlist = vars()
1494 from asap.asapfitter import fitter
1495 from asap.asaplinefind import linefinder
1496 from asap import _is_sequence_or_number as _is_valid
1497
1498 # check whether edge is set up for each IF individually
1499 individualedge = False
1500 if len(edge) > 1:
1501 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1502 individualedge = True
1503
1504 if not _is_valid(edge, int) and not individualedge:
1505 raise ValueError, "Parameter 'edge' has to be an integer or a \
1506 pair of integers specified as a tuple. Nested tuples are allowed \
1507 to make individual selection for different IFs."
1508
1509 curedge = (0, 0)
1510 if individualedge:
1511 for edgepar in edge:
1512 if not _is_valid(edgepar, int):
1513 raise ValueError, "Each element of the 'edge' tuple has \
1514 to be a pair of integers or an integer."
1515 else:
1516 curedge = edge
1517
1518 # setup fitter
1519 f = fitter()
1520 f.set_function(poly=order)
1521
1522 # setup line finder
1523 fl = linefinder()
1524 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
1525
1526 if not insitu:
1527 workscan = self.copy()
1528 else:
1529 workscan = self
1530
1531 fl.set_scan(workscan)
1532
1533 rows = range(workscan.nrow())
1534 # Save parameters of baseline fits & masklists as a class attribute.
1535 # NOTICE: It does not reflect changes in scantable!
1536 if len(rows) > 0:
1537 self.blpars=[]
1538 self.masklists=[]
1539 asaplog.push("Processing:")
1540 for r in rows:
1541 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1542 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1543 workscan.getpol(r), workscan.getcycle(r))
1544 asaplog.push(msg, False)
1545
1546 # figure out edge parameter
1547 if individualedge:
1548 if len(edge) <= workscan.getif(r):
1549 raise RuntimeError, "Number of edge elements appear to " \
1550 "be less than the number of IFs"
1551 curedge = edge[workscan.getif(r)]
1552
1553 # setup line finder
1554 fl.find_lines(r, mask, curedge)
1555 outmask=fl.get_mask()
1556 f.set_scan(workscan, fl.get_mask())
1557 f.x = workscan._getabcissa(r)
1558 f.y = workscan._getspectrum(r)
1559 f.data = None
1560 f.fit()
1561
1562 # Show mask list
1563 masklist=workscan.get_masklist(fl.get_mask(),row=r)
1564 msg = "mask range: "+str(masklist)
1565 asaplog.push(msg, False)
1566
1567 fpar = f.get_parameters()
1568 if plot:
1569 f.plot(residual=True)
1570 x = raw_input("Accept fit ( [y]/n ): ")
1571 if x.upper() == 'N':
1572 self.blpars.append(None)
1573 self.masklists.append(None)
1574 continue
1575 workscan._setspectrum(f.fitter.getresidual(), r)
1576 self.blpars.append(fpar)
1577 self.masklists.append(masklist)
1578 if plot:
1579 f._p.unmap()
1580 f._p = None
1581 workscan._add_history("auto_poly_baseline", varlist)
1582 if insitu:
1583 self._assign(workscan)
1584 else:
1585 return workscan
1586
1587 def rotate_linpolphase(self, angle):
1588 """
1589 Rotate the phase of the complex polarization O=Q+iU correlation.
1590 This is always done in situ in the raw data. So if you call this
1591 function more than once then each call rotates the phase further.
1592 Parameters:
1593 angle: The angle (degrees) to rotate (add) by.
1594 Examples:
1595 scan.rotate_linpolphase(2.3)
1596 """
1597 varlist = vars()
1598 self._math._rotate_linpolphase(self, angle)
1599 self._add_history("rotate_linpolphase", varlist)
1600 print_log()
1601 return
1602
1603
1604 def rotate_xyphase(self, angle):
1605 """
1606 Rotate the phase of the XY correlation. This is always done in situ
1607 in the data. So if you call this function more than once
1608 then each call rotates the phase further.
1609 Parameters:
1610 angle: The angle (degrees) to rotate (add) by.
1611 Examples:
1612 scan.rotate_xyphase(2.3)
1613 """
1614 varlist = vars()
1615 self._math._rotate_xyphase(self, angle)
1616 self._add_history("rotate_xyphase", varlist)
1617 print_log()
1618 return
1619
1620 def swap_linears(self):
1621 """
1622 Swap the linear polarisations XX and YY, or rather the first two
1623 polarisations, as this also works for circulars.
1624 """
1625 varlist = vars()
1626 self._math._swap_linears(self)
1627 self._add_history("swap_linears", varlist)
1628 print_log()
1629 return
1630
1631 def invert_phase(self):
1632 """
1633 Invert the phase of the complex polarisation
1634 """
1635 varlist = vars()
1636 self._math._invert_phase(self)
1637 self._add_history("invert_phase", varlist)
1638 print_log()
1639 return
1640
1641 def add(self, offset, insitu=None):
1642 """
1643 Return a scan where all spectra have the offset added
1644 Parameters:
1645 offset: the offset
1646 insitu: if False a new scantable is returned.
1647 Otherwise, the scaling is done in-situ
1648 The default is taken from .asaprc (False)
1649 """
1650 if insitu is None: insitu = rcParams['insitu']
1651 self._math._setinsitu(insitu)
1652 varlist = vars()
1653 s = scantable(self._math._unaryop(self, offset, "ADD", False))
1654 s._add_history("add", varlist)
1655 print_log()
1656 if insitu:
1657 self._assign(s)
1658 else:
1659 return s
1660
1661 def scale(self, factor, tsys=True, insitu=None):
1662 """
1663 Return a scan where all spectra are scaled by the given 'factor'
1664 Parameters:
1665 factor: the scaling factor
1666 insitu: if False a new scantable is returned.
1667 Otherwise, the scaling is done in-situ
1668 The default is taken from .asaprc (False)
1669 tsys: if True (default) then apply the operation to Tsys
1670 as well as the data
1671 """
1672 if insitu is None: insitu = rcParams['insitu']
1673 self._math._setinsitu(insitu)
1674 varlist = vars()
1675 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1676 s._add_history("scale", varlist)
1677 print_log()
1678 if insitu:
1679 self._assign(s)
1680 else:
1681 return s
1682
1683 def auto_quotient(self, preserve=True, mode='paired'):
1684 """
1685 This function builds quotients automatically.
1686 It assumes the observation has the same number of
1687 "ons" and "offs".
1688 Parameters:
1689 preserve: you can preserve (default) the continuum or
1690 remove it. The equations used are
1691 preserve: Output = Toff * (on/off) - Toff
1692 remove: Output = Toff * (on/off) - Ton
1693 mode: the on/off detection mode
1694 'paired' (default)
1695 identifies 'off' scans by the
1696 trailing '_R' (Mopra/Parkes) or
1697 '_e'/'_w' (Tid) and matches
1698 on/off pairs from the observing pattern
1699 'time'
1700 finds the closest off in time
1701
1702 """
1703 modes = ["time", "paired"]
1704 if not mode in modes:
1705 msg = "please provide valid mode. Valid modes are %s" % (modes)
1706 raise ValueError(msg)
1707 varlist = vars()
1708 s = None
1709 if mode.lower() == "paired":
1710 basesel = self.get_selection()
1711 sel = selector()+basesel
1712 sel.set_query("SRCTYPE==1")
1713 self.set_selection(sel)
1714 offs = self.copy()
1715 sel.set_query("SRCTYPE==0")
1716 self.set_selection(sel)
1717 ons = self.copy()
1718 s = scantable(self._math._quotient(ons, offs, preserve))
1719 self.set_selection(basesel)
1720 elif mode.lower() == "time":
1721 s = scantable(self._math._auto_quotient(self, mode, preserve))
1722 s._add_history("auto_quotient", varlist)
1723 print_log()
1724 return s
1725
1726 def mx_quotient(self, mask = None, weight='median', preserve=True):
1727 """
1728 Form a quotient using "off" beams when observing in "MX" mode.
1729 Parameters:
1730 mask: an optional mask to be used when weight == 'stddev'
1731 weight: How to average the off beams. Default is 'median'.
1732 preserve: you can preserve (default) the continuum or
1733 remove it. The equations used are
1734 preserve: Output = Toff * (on/off) - Toff
1735 remove: Output = Toff * (on/off) - Ton
1736 """
1737 if mask is None: mask = ()
1738 varlist = vars()
1739 on = scantable(self._math._mx_extract(self, 'on'))
1740 preoff = scantable(self._math._mx_extract(self, 'off'))
1741 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1742 from asapmath import quotient
1743 q = quotient(on, off, preserve)
1744 q._add_history("mx_quotient", varlist)
1745 print_log()
1746 return q
1747
1748 def freq_switch(self, insitu=None):
1749 """
1750 Apply frequency switching to the data.
1751 Parameters:
1752 insitu: if False a new scantable is returned.
1753 Otherwise, the switching is done in-situ
1754 The default is taken from .asaprc (False)
1755 Example:
1756 none
1757 """
1758 if insitu is None: insitu = rcParams['insitu']
1759 self._math._setinsitu(insitu)
1760 varlist = vars()
1761 s = scantable(self._math._freqswitch(self))
1762 s._add_history("freq_switch", varlist)
1763 print_log()
1764 if insitu: self._assign(s)
1765 else: return s
1766
1767 def recalc_azel(self):
1768 """
1769 Recalculate the azimuth and elevation for each position.
1770 Parameters:
1771 none
1772 Example:
1773 """
1774 varlist = vars()
1775 self._recalcazel()
1776 self._add_history("recalc_azel", varlist)
1777 print_log()
1778 return
1779
1780 def __add__(self, other):
1781 varlist = vars()
1782 s = None
1783 if isinstance(other, scantable):
1784 s = scantable(self._math._binaryop(self, other, "ADD"))
1785 elif isinstance(other, float):
1786 s = scantable(self._math._unaryop(self, other, "ADD", False))
1787 else:
1788 raise TypeError("Other input is not a scantable or float value")
1789 s._add_history("operator +", varlist)
1790 print_log()
1791 return s
1792
1793 def __sub__(self, other):
1794 """
1795 implicit on all axes and on Tsys
1796 """
1797 varlist = vars()
1798 s = None
1799 if isinstance(other, scantable):
1800 s = scantable(self._math._binaryop(self, other, "SUB"))
1801 elif isinstance(other, float):
1802 s = scantable(self._math._unaryop(self, other, "SUB", False))
1803 else:
1804 raise TypeError("Other input is not a scantable or float value")
1805 s._add_history("operator -", varlist)
1806 print_log()
1807 return s
1808
1809 def __mul__(self, other):
1810 """
1811 implicit on all axes and on Tsys
1812 """
1813 varlist = vars()
1814 s = None
1815 if isinstance(other, scantable):
1816 s = scantable(self._math._binaryop(self, other, "MUL"))
1817 elif isinstance(other, float):
1818 s = scantable(self._math._unaryop(self, other, "MUL", False))
1819 else:
1820 raise TypeError("Other input is not a scantable or float value")
1821 s._add_history("operator *", varlist)
1822 print_log()
1823 return s
1824
1825
1826 def __div__(self, other):
1827 """
1828 implicit on all axes and on Tsys
1829 """
1830 varlist = vars()
1831 s = None
1832 if isinstance(other, scantable):
1833 s = scantable(self._math._binaryop(self, other, "DIV"))
1834 elif isinstance(other, float):
1835 if other == 0.0:
1836 raise ZeroDivisionError("Dividing by zero is not recommended")
1837 s = scantable(self._math._unaryop(self, other, "DIV", False))
1838 else:
1839 raise TypeError("Other input is not a scantable or float value")
1840 s._add_history("operator /", varlist)
1841 print_log()
1842 return s
1843
1844 def get_fit(self, row=0):
1845 """
1846 Print or return the stored fits for a row in the scantable
1847 Parameters:
1848 row: the row which the fit has been applied to.
1849 """
1850 if row >= self.nrow():
1851 return
1852 from asap.asapfit import asapfit
1853 fit = asapfit(self._getfit(row))
1854 if rcParams['verbose']:
1855 print fit
1856 return
1857 else:
1858 return fit.as_dict()
1859
1860 def _add_history(self, funcname, parameters):
1861 # create date
1862 sep = "##"
1863 from datetime import datetime
1864 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1865 hist = dstr+sep
1866 hist += funcname+sep#cdate+sep
1867 if parameters.has_key('self'): del parameters['self']
1868 for k, v in parameters.iteritems():
1869 if type(v) is dict:
1870 for k2, v2 in v.iteritems():
1871 hist += k2
1872 hist += "="
1873 if isinstance(v2, scantable):
1874 hist += 'scantable'
1875 elif k2 == 'mask':
1876 if isinstance(v2, list) or isinstance(v2, tuple):
1877 hist += str(self._zip_mask(v2))
1878 else:
1879 hist += str(v2)
1880 else:
1881 hist += str(v2)
1882 else:
1883 hist += k
1884 hist += "="
1885 if isinstance(v, scantable):
1886 hist += 'scantable'
1887 elif k == 'mask':
1888 if isinstance(v, list) or isinstance(v, tuple):
1889 hist += str(self._zip_mask(v))
1890 else:
1891 hist += str(v)
1892 else:
1893 hist += str(v)
1894 hist += sep
1895 hist = hist[:-2] # remove trailing '##'
1896 self._addhistory(hist)
1897
1898
1899 def _zip_mask(self, mask):
1900 mask = list(mask)
1901 i = 0
1902 segments = []
1903 while mask[i:].count(1):
1904 i += mask[i:].index(1)
1905 if mask[i:].count(0):
1906 j = i + mask[i:].index(0)
1907 else:
1908 j = len(mask)
1909 segments.append([i, j])
1910 i = j
1911 return segments
1912
1913 def _get_ordinate_label(self):
1914 fu = "("+self.get_fluxunit()+")"
1915 import re
1916 lbl = "Intensity"
1917 if re.match(".K.", fu):
1918 lbl = "Brightness Temperature "+ fu
1919 elif re.match(".Jy.", fu):
1920 lbl = "Flux density "+ fu
1921 return lbl
1922
1923 def _check_ifs(self):
1924 nchans = [self.nchan(i) for i in range(self.nif(-1))]
1925 nchans = filter(lambda t: t > 0, nchans)
1926 return (sum(nchans)/len(nchans) == nchans[0])
1927
1928 def _fill(self, names, unit, average, getpt):
1929 import os
1930 from asap._asap import stfiller
1931 first = True
1932 fullnames = []
1933 for name in names:
1934 name = os.path.expandvars(name)
1935 name = os.path.expanduser(name)
1936 if not os.path.exists(name):
1937 msg = "File '%s' does not exists" % (name)
1938 if rcParams['verbose']:
1939 asaplog.push(msg)
1940 print asaplog.pop().strip()
1941 return
1942 raise IOError(msg)
1943 fullnames.append(name)
1944 if average:
1945 asaplog.push('Auto averaging integrations')
1946 stype = int(rcParams['scantable.storage'].lower() == 'disk')
1947 for name in fullnames:
1948 tbl = Scantable(stype)
1949 r = stfiller(tbl)
1950 msg = "Importing %s..." % (name)
1951 asaplog.push(msg, False)
1952 print_log()
1953 r._open(name, -1, -1, getpt)
1954 r._read()
1955 #tbl = r._getdata()
1956 if average:
1957 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
1958 #tbl = tbl2
1959 if not first:
1960 tbl = self._math._merge([self, tbl])
1961 #tbl = tbl2
1962 Scantable.__init__(self, tbl)
1963 r._close()
1964 del r, tbl
1965 first = False
1966 if unit is not None:
1967 self.set_fluxunit(unit)
1968 #self.set_freqframe(rcParams['scantable.freqframe'])
1969