source: branches/alma/python/scantable.py@ 1523

Last change on this file since 1523 was 1522, checked in by TakTsutsumi, 16 years ago

New Development: No

JIRA Issue: No

Ready to Release: Yes

Interface Changes: No

What Interface Changed: Please list interface changes

Test Programs: List test programs

Put in Release Notes: No

Module(s): Module Names change impacts.

Description: Updated the documentation for getpt


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 74.2 KB
Line 
1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log
4from asap import asaplog
5from asap import selector
6from asap import linecatalog
7from asap import _n_bools, mask_not, mask_and, mask_or
8
9class scantable(Scantable):
10 """
11 The ASAP container for scans
12 """
13
    def __init__(self, filename, average=None, unit=None, getpt=None):
        """
        Create a scantable from a saved one or make a reference
        Parameters:
            filename:    the name of an asap table on disk
                         or
                         the name of a rpfits/sdfits/ms file
                         (integrations within scans are auto averaged
                         and the whole file is read)
                         or
                         [advanced] a reference to an existing
                         scantable
            average:     average all integrations within a scan on read.
                         The default (True) is taken from .asaprc.
            unit:        brightness unit; must be consistent with K or Jy.
                         Over-rides the default selected by the reader
                         (input rpfits/sdfits/ms) or replaces the value
                         in existing scantables
            getpt:       for MeasurementSet input data only:
                         If True, all pointing data are filled.
                         The default is False, which makes time to load
                         the MS data faster in some cases.
        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        if getpt is None:
            getpt = False
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath()
        if isinstance(filename, Scantable):
            # reference to an existing scantable: share its data
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                import os.path
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    if rcParams['verbose']:
                        asaplog.push(s)
                        print asaplog.pop().strip()
                        return
                    raise IOError(s)
                if os.path.isdir(filename) \
                    and not os.path.exists(filename+'/table.f1'):
                    # crude check if asap table
                    if os.path.exists(filename+'/table.info'):
                        ondisk = rcParams['scantable.storage'] == 'disk'
                        Scantable.__init__(self, filename, ondisk)
                        if unit is not None:
                            self.set_fluxunit(unit)
                        # do not reset to the default freqframe
                        #self.set_freqframe(rcParams['scantable.freqframe'])
                    else:
                        msg = "The given file '%s'is not a valid " \
                              "asap table." % (filename)
                        if rcParams['verbose']:
                            print msg
                            return
                        else:
                            raise IOError(msg)
                else:
                    # not an asap table on disk: import via the filler
                    self._fill([filename], unit, average, getpt)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                  and isinstance(filename[-1], str):
                # a list/tuple of file names: fill them all into one table
                self._fill(filename, unit, average, getpt)
        self._add_history("scantable", varlist)
        print_log()
83
84 def save(self, name=None, format=None, overwrite=False):
85 """
86 Store the scantable on disk. This can be an asap (aips++) Table,
87 SDFITS or MS2 format.
88 Parameters:
89 name: the name of the outputfile. For format "ASCII"
90 this is the root file name (data in 'name'.txt
91 and header in 'name'_header.txt)
92 format: an optional file format. Default is ASAP.
93 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
94 'SDFITS' (save as SDFITS file)
95 'ASCII' (saves as ascii text file)
96 'MS2' (saves as an aips++
97 MeasurementSet V2)
98 overwrite: If the file should be overwritten if it exists.
99 The default False is to return with warning
100 without writing the output. USE WITH CARE.
101 Example:
102 scan.save('myscan.asap')
103 scan.save('myscan.sdfits', 'SDFITS')
104 """
105 from os import path
106 if format is None: format = rcParams['scantable.save']
107 suffix = '.'+format.lower()
108 if name is None or name == "":
109 name = 'scantable'+suffix
110 msg = "No filename given. Using default name %s..." % name
111 asaplog.push(msg)
112 name = path.expandvars(name)
113 if path.isfile(name) or path.isdir(name):
114 if not overwrite:
115 msg = "File %s exists." % name
116 if rcParams['verbose']:
117 print msg
118 return
119 else:
120 raise IOError(msg)
121 format2 = format.upper()
122 if format2 == 'ASAP':
123 self._save(name)
124 else:
125 from asap._asap import stwriter as stw
126 writer = stw(format2)
127 writer.write(self, name)
128 print_log()
129 return
130
131 def copy(self):
132 """
133 Return a copy of this scantable.
134 Note:
135 This makes a full (deep) copy. scan2 = scan1 makes a reference.
136 Parameters:
137 none
138 Example:
139 copiedscan = scan.copy()
140 """
141 sd = scantable(Scantable._copy(self))
142 return sd
143
144 def drop_scan(self, scanid=None):
145 """
146 Return a new scantable where the specified scan number(s) has(have)
147 been dropped.
148 Parameters:
149 scanid: a (list of) scan number(s)
150 """
151 from asap import _is_sequence_or_number as _is_valid
152 from asap import _to_list
153 from asap import unique
154 if not _is_valid(scanid):
155 if rcParams['verbose']:
156 print "Please specify a scanno to drop from the scantable"
157 return
158 else:
159 raise RuntimeError("No scan given")
160 try:
161 scanid = _to_list(scanid)
162 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
163 for sid in scanid: allscans.remove(sid)
164 if len(allscans) == 0:
165 raise ValueError("Can't remove all scans")
166 except ValueError:
167 if rcParams['verbose']:
168 print "Couldn't find any match."
169 return
170 else: raise
171 try:
172 bsel = self.get_selection()
173 sel = selector()
174 sel.set_scans(allscans)
175 self.set_selection(bsel+sel)
176 scopy = self._copy()
177 self.set_selection(bsel)
178 return scantable(scopy)
179 except RuntimeError:
180 if rcParams['verbose']:
181 print "Couldn't find any match."
182 else:
183 raise
184
185
186 def get_scan(self, scanid=None):
187 """
188 Return a specific scan (by scanno) or collection of scans (by
189 source name) in a new scantable.
190 Note:
191 See scantable.drop_scan() for the inverse operation.
192 Parameters:
193 scanid: a (list of) scanno or a source name, unix-style
194 patterns are accepted for source name matching, e.g.
195 '*_R' gets all 'ref scans
196 Example:
197 # get all scans containing the source '323p459'
198 newscan = scan.get_scan('323p459')
199 # get all 'off' scans
200 refscans = scan.get_scan('*_R')
201 # get a susbset of scans by scanno (as listed in scan.summary())
202 newscan = scan.get_scan([0, 2, 7, 10])
203 """
204 if scanid is None:
205 if rcParams['verbose']:
206 print "Please specify a scan no or name to " \
207 "retrieve from the scantable"
208 return
209 else:
210 raise RuntimeError("No scan given")
211
212 try:
213 bsel = self.get_selection()
214 sel = selector()
215 if type(scanid) is str:
216 sel.set_name(scanid)
217 self.set_selection(bsel+sel)
218 scopy = self._copy()
219 self.set_selection(bsel)
220 return scantable(scopy)
221 elif type(scanid) is int:
222 sel.set_scans([scanid])
223 self.set_selection(bsel+sel)
224 scopy = self._copy()
225 self.set_selection(bsel)
226 return scantable(scopy)
227 elif type(scanid) is list:
228 sel.set_scans(scanid)
229 self.set_selection(sel)
230 scopy = self._copy()
231 self.set_selection(bsel)
232 return scantable(scopy)
233 else:
234 msg = "Illegal scanid type, use 'int' or 'list' if ints."
235 if rcParams['verbose']:
236 print msg
237 else:
238 raise TypeError(msg)
239 except RuntimeError:
240 if rcParams['verbose']: print "Couldn't find any match."
241 else: raise
242
    def __str__(self):
        # the verbose summary doubles as the string representation
        return Scantable._summary(self, True)
245
246 def summary(self, filename=None):
247 """
248 Print a summary of the contents of this scantable.
249 Parameters:
250 filename: the name of a file to write the putput to
251 Default - no file output
252 verbose: print extra info such as the frequency table
253 The default (False) is taken from .asaprc
254 """
255 info = Scantable._summary(self, True)
256 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
257 if filename is not None:
258 if filename is "":
259 filename = 'scantable_summary.txt'
260 from os.path import expandvars, isdir
261 filename = expandvars(filename)
262 if not isdir(filename):
263 data = open(filename, 'w')
264 data.write(info)
265 data.close()
266 else:
267 msg = "Illegal file name '%s'." % (filename)
268 if rcParams['verbose']:
269 print msg
270 else:
271 raise IOError(msg)
272 if rcParams['verbose']:
273 try:
274 from IPython.genutils import page as pager
275 except ImportError:
276 from pydoc import pager
277 pager(info)
278 else:
279 return info
280
281
282 def get_selection(self):
283 """
284 Get the selection object currently set on this scantable.
285 Parameters:
286 none
287 Example:
288 sel = scan.get_selection()
289 sel.set_ifs(0) # select IF 0
290 scan.set_selection(sel) # apply modified selection
291 """
292 return selector(self._getselection())
293
294 def set_selection(self, selection=selector()):
295 """
296 Select a subset of the data. All following operations on this scantable
297 are only applied to thi selection.
298 Parameters:
299 selection: a selector object (default unset the selection)
300 Examples:
301 sel = selector() # create a selection object
302 self.set_scans([0, 3]) # select SCANNO 0 and 3
303 scan.set_selection(sel) # set the selection
304 scan.summary() # will only print summary of scanno 0 an 3
305 scan.set_selection() # unset the selection
306 """
307 self._setselection(selection)
308
309 def get_row(self, row=0, insitu=None):
310 """
311 Select a row in the scantable.
312 Return a scantable with single row.
313 Parameters:
314 row: row no of integration, default is 0.
315 insitu: if False a new scantable is returned.
316 Otherwise, the scaling is done in-situ
317 The default is taken from .asaprc (False)
318 """
319 if insitu is None: insitu = rcParams['insitu']
320 if not insitu:
321 workscan = self.copy()
322 else:
323 workscan = self
324 # Select a row
325 sel=selector()
326 sel.set_scans([workscan.getscan(row)])
327 sel.set_cycles([workscan.getcycle(row)])
328 sel.set_beams([workscan.getbeam(row)])
329 sel.set_ifs([workscan.getif(row)])
330 sel.set_polarisations([workscan.getpol(row)])
331 sel.set_name(workscan._getsourcename(row))
332 workscan.set_selection(sel)
333 if not workscan.nrow() == 1:
334 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
335 raise RuntimeError(msg)
336 del sel
337 if insitu:
338 self._assign(workscan)
339 else:
340 return workscan
341
342 def stats(self, stat='stddev', mask=None):
343 """
344 Determine the specified statistic of the current beam/if/pol
345 Takes a 'mask' as an optional parameter to specify which
346 channels should be excluded.
347 You can get min/max values with their
348 channels/frequencies/velocities by selecting stat='min_abc'
349 or 'max_abc'.
350 Parameters:
351 stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
352 'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
353 mask: an optional mask specifying where the statistic
354 should be determined.
355 Example:
356 scan.set_unit('channel')
357 msk = scan.create_mask([100, 200], [500, 600])
358 scan.stats(stat='mean', mask=m)
359 """
360 if mask == None:
361 mask = []
362 axes = ['Beam', 'IF', 'Pol', 'Time']
363 if not self._check_ifs():
364 raise ValueError("Cannot apply mask as the IFs have different "
365 "number of channels. Please use setselection() "
366 "to select individual IFs")
367
368 if stat.lower().find('_abc') == -1:
369 statvals = self._math._stats(self, mask, stat)
370 getchan = False
371 sstat = str(stat)
372 else:
373 chan = self._math._minmaxchan(self, mask, stat)
374 getchan = True
375 statvals = []
376 sstat = stat.lower().strip('_abc')
377 out = ''
378 axes = []
379 for i in range(self.nrow()):
380 axis = []
381 axis.append(self.getscan(i))
382 axis.append(self.getbeam(i))
383 axis.append(self.getif(i))
384 axis.append(self.getpol(i))
385 axis.append(self.getcycle(i))
386 axes.append(axis)
387 tm = self._gettime(i)
388 src = self._getsourcename(i)
389 xpos = ''
390 if getchan:
391 qx, qy = self.chan2data(rowno=i, chan=chan[i])
392 statvals.append(qy['value'])
393 xpos = '(x = %3.3f' % (qx['value'])+' ['+qx['unit']+'])'
394 out += 'Scan[%d] (%s) ' % (axis[0], src)
395 out += 'Time[%s]:\n' % (tm)
396 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
397 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
398 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
399 out += '= %3.3f ' % (statvals[i]) +xpos+'\n'
400 out += "--------------------------------------------------\n"
401
402 if rcParams['verbose']:
403 print "--------------------------------------------------"
404 print " ", sstat
405 print "--------------------------------------------------"
406 print out
407 #else:
408 #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
409 # 'axes' : axes,
410 # 'data': statvals}
411 return statvals
412
413 def chan2data(self, rowno=0, chan=0):
414 """
415 Returns channel/frequency/velocity and spectral value
416 at an arbitrary row and channel in the scantable.
417 Parameters:
418 rowno: a row number in the scantable. Default is the
419 first row, i.e. rowno=0
420 chan: a channel in the scantable. Default is the first
421 channel, i.e. pos=0
422 """
423 if isinstance(rowno, int) and isinstance(chan, int):
424 x, xlbl = self.get_abcissa(rowno)
425 qx = {'unit': xlbl, 'value': x[chan]}
426 qy = {'unit': self.get_fluxunit(),
427 'value': self._getspectrum(rowno)[chan]}
428 return qx, qy
429
430 def stddev(self, mask=None):
431 """
432 Determine the standard deviation of the current beam/if/pol
433 Takes a 'mask' as an optional parameter to specify which
434 channels should be excluded.
435 Parameters:
436 mask: an optional mask specifying where the standard
437 deviation should be determined.
438
439 Example:
440 scan.set_unit('channel')
441 msk = scan.create_mask([100, 200], [500, 600])
442 scan.stddev(mask=m)
443 """
444 return self.stats(stat='stddev', mask=mask);
445
446
    def get_column_names(self):
        """
        Return a list of column names, which can be used for selection.
        """
        # wrap the low-level sequence in a plain python list
        return list(Scantable.get_column_names(self))
452
    def get_tsys(self):
        """
        Return the System temperatures.
        Returns:
            a list of Tsys values for the current selection
        """
        # _row_callback evaluates the getter per row and, in verbose mode,
        # pretty-prints one labelled block per row
        return self._row_callback(self._gettsys, "Tsys")
461
    def _row_callback(self, callback, label):
        # Apply 'callback' to every row of the scantable, returning the
        # per-row results as a list. In verbose mode a block per row is
        # printed, headed by scan/source/time and, where the axis actually
        # varies, beam/IF/pol, under the given 'label'.
        axes = []
        axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
        out = ""
        outvec = []
        for i in range(self.nrow()):
            axis = []
            axis.append(self.getscan(i))
            axis.append(self.getbeam(i))
            axis.append(self.getif(i))
            axis.append(self.getpol(i))
            axis.append(self.getcycle(i))
            axes.append(axis)
            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (axis[0], src)
            out += 'Time[%s]:\n' % (tm)
            # only label axes that have more than one value in the data
            if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
            if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
            if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
            outvec.append(callback(i))
            out += '= %3.3f\n' % (outvec[i])
            out += "--------------------------------------------------\n"
        if rcParams['verbose']:
            print "--------------------------------------------------"
            print " %s" % (label)
            print "--------------------------------------------------"
            print out
        # disabled because the vector seems more useful
        #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
        return outvec
493
494 def _get_column(self, callback, row=-1):
495 """
496 """
497 if row == -1:
498 return [callback(i) for i in range(self.nrow())]
499 else:
500 if 0 <= row < self.nrow():
501 return callback(row)
502
503
504 def get_time(self, row=-1, asdatetime=False):
505 """
506 Get a list of time stamps for the observations.
507 Return a datetime object for each integration time stamp in the scantable.
508 Parameters:
509 row: row no of integration. Default -1 return all rows
510 asdatetime: return values as datetime objects rather than strings
511 Example:
512 none
513 """
514 from time import strptime
515 from datetime import datetime
516 times = self._get_column(self._gettime, row)
517 if not asdatetime:
518 return times
519 format = "%Y/%m/%d/%H:%M:%S"
520 if isinstance(times, list):
521 return [datetime(*strptime(i, format)[:6]) for i in times]
522 else:
523 return datetime(*strptime(times, format)[:6])
524
525
    def get_inttime(self, row=-1):
        """
        Get a list of integration times for the observations.
        Return a time in seconds for each integration in the scantable.
        Parameters:
            row:    row no of integration. Default -1 return all rows.
        Example:
            none
        """
        return self._get_column(self._getinttime, row)
536
537
    def get_sourcename(self, row=-1):
        """
        Get a list of source names for the observations.
        Return a string for each integration in the scantable.
        Parameters:
            row:    row no of integration. Default -1 return all rows.
        Example:
            none
        """
        return self._get_column(self._getsourcename, row)
548
    def get_elevation(self, row=-1):
        """
        Get a list of elevations for the observations.
        Return a float for each integration in the scantable.
        Parameters:
            row:    row no of integration. Default -1 return all rows.
        Example:
            none
        """
        return self._get_column(self._getelevation, row)
559
    def get_azimuth(self, row=-1):
        """
        Get a list of azimuths for the observations.
        Return a float for each integration in the scantable.
        Parameters:
            row:    row no of integration. Default -1 return all rows.
        Example:
            none
        """
        return self._get_column(self._getazimuth, row)
570
    def get_parangle(self, row=-1):
        """
        Get a list of parallactic angles for the observations.
        Return a float for each integration in the scantable.
        Parameters:
            row:    row no of integration. Default -1 return all rows.
        Example:
            none
        """
        return self._get_column(self._getparangle, row)
581
    def get_direction(self, row=-1):
        """
        Get a list of Positions on the sky (direction) for the observations.
        Return a float for each integration in the scantable.
        Parameters:
            row:    row no of integration. Default -1 return all rows
        Example:
            none
        """
        return self._get_column(self._getdirection, row)
592
    def get_directionval(self, row=-1):
        """
        Get a list of Positions on the sky (direction) for the observations.
        Return a float for each integration in the scantable.
        Parameters:
            row:    row no of integration. Default -1 return all rows
        Example:
            none
        """
        return self._get_column(self._getdirectionvec, row)
603
604 def set_unit(self, unit='channel'):
605 """
606 Set the unit for all following operations on this scantable
607 Parameters:
608 unit: optional unit, default is 'channel'
609 one of '*Hz', 'km/s', 'channel', ''
610 """
611 varlist = vars()
612 if unit in ['', 'pixel', 'channel']:
613 unit = ''
614 inf = list(self._getcoordinfo())
615 inf[0] = unit
616 self._setcoordinfo(inf)
617 self._add_history("set_unit", varlist)
618
619 def set_instrument(self, instr):
620 """
621 Set the instrument for subsequent processing.
622 Parameters:
623 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
624 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
625 """
626 self._setInstrument(instr)
627 self._add_history("set_instument", vars())
628 print_log()
629
    def set_feedtype(self, feedtype):
        """
        Overwrite the feed type, which might not be set correctly.
        Parameters:
            feedtype:    'linear' or 'circular'
        """
        self._setfeedtype(feedtype)
        self._add_history("set_feedtype", vars())
        print_log()
639
640 def set_doppler(self, doppler='RADIO'):
641 """
642 Set the doppler for all following operations on this scantable.
643 Parameters:
644 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
645 """
646 varlist = vars()
647 inf = list(self._getcoordinfo())
648 inf[2] = doppler
649 self._setcoordinfo(inf)
650 self._add_history("set_doppler", vars())
651 print_log()
652
653 def set_freqframe(self, frame=None):
654 """
655 Set the frame type of the Spectral Axis.
656 Parameters:
657 frame: an optional frame type, default 'LSRK'. Valid frames are:
658 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
659 'GEO', 'GALACTO', 'LGROUP', 'CMB'
660 Examples:
661 scan.set_freqframe('BARY')
662 """
663 if frame is None: frame = rcParams['scantable.freqframe']
664 varlist = vars()
665 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
666 'GEO', 'GALACTO', 'LGROUP', 'CMB']
667
668 if frame in valid:
669 inf = list(self._getcoordinfo())
670 inf[1] = frame
671 self._setcoordinfo(inf)
672 self._add_history("set_freqframe", varlist)
673 else:
674 msg = "Please specify a valid freq type. Valid types are:\n", valid
675 if rcParams['verbose']:
676 print msg
677 else:
678 raise TypeError(msg)
679 print_log()
680
    def set_dirframe(self, frame=""):
        """
        Set the frame type of the Direction on the sky.
        Parameters:
            frame:    an optional frame type, default ''. Valid frames are:
                      'J2000', 'B1950', 'GALACTIC'
        Examples:
            scan.set_dirframe('GALACTIC')
        """
        varlist = vars()
        try:
            Scantable.set_dirframe(self, frame)
        except RuntimeError, msg:
            # in verbose mode report the error instead of raising it
            if rcParams['verbose']:
                print msg
            else:
                raise
        self._add_history("set_dirframe", varlist)
699
700 def get_unit(self):
701 """
702 Get the default unit set in this scantable
703 Returns:
704 A unit string
705 """
706 inf = self._getcoordinfo()
707 unit = inf[0]
708 if unit == '': unit = 'channel'
709 return unit
710
    def get_abcissa(self, rowno=0):
        """
        Get the abcissa in the current coordinate setup for the currently
        selected Beam/IF/Pol
        Parameters:
            rowno:    an optional row number in the scantable. Default is the
                      first row, i.e. rowno=0
        Returns:
            The abcissa values and the format string (as a dictionary)
        """
        abc = self._getabcissa(rowno)
        lbl = self._getabcissalabel(rowno)
        print_log()
        return abc, lbl
725
    def flag(self, mask=None, unflag=False):
        """
        Flag the selected data using an optional channel mask.
        Parameters:
            mask:      an optional channel mask, created with create_mask. Default
                       (no mask) is all channels.
            unflag:    if True, unflag the data
        """
        varlist = vars()
        # an empty mask means "all channels" for the underlying _flag call
        if mask is None:
            mask = []
        try:
            self._flag(mask, unflag)
        except RuntimeError, msg:
            # in verbose mode report the error instead of raising it
            if rcParams['verbose']:
                print msg
                return
            else: raise
        self._add_history("flag", varlist)
745
746 def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
747 """
748 Flag the data in 'lag' space by providing a frequency to remove.
749 Flagged data in the scantable gets set to 0.0 before the fft.
750 No taper is applied.
751 Parameters:
752 frequency: the frequency (really a period within the bandwidth)
753 to remove
754 width: the width of the frequency to remove, to remove a
755 range of frequencies aroung the centre.
756 unit: the frequency unit (default "GHz")
757 Notes:
758 It is recommended to flag edges of the band or strong
759 signals beforehand.
760 """
761 if insitu is None: insitu = rcParams['insitu']
762 self._math._setinsitu(insitu)
763 varlist = vars()
764 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
765 if not base.has_key(unit):
766 raise ValueError("%s is not a valid unit." % unit)
767 try:
768 s = scantable(self._math._lag_flag(self, frequency*base[unit],
769 width*base[unit]))
770 except RuntimeError, msg:
771 if rcParams['verbose']:
772 print msg
773 return
774 else: raise
775 s._add_history("lag_flag", varlist)
776 print_log()
777 if insitu:
778 self._assign(s)
779 else:
780 return s
781
782
    def create_mask(self, *args, **kwargs):
        """
        Compute and return a mask based on [min, max] windows.
        The specified windows are to be INCLUDED, when the mask is
        applied.
        Parameters:
            [min, max], [min2, max2], ...
                Pairs of start/end points (inclusive) specifying the regions
                to be masked
            invert:    optional argument. If specified as True,
                       return an inverted mask, i.e. the regions
                       specified are EXCLUDED
            row:       create the mask using the specified row for
                       unit conversions, default is row=0
                       only necessary if frequency varies over rows.
        Example:
            scan.set_unit('channel')
            a)
            msk = scan.create_mask([400, 500], [800, 900])
            # masks everything outside 400 and 500
            # and 800 and 900 in the unit 'channel'

            b)
            msk = scan.create_mask([400, 500], [800, 900], invert=True)
            # masks the regions between 400 and 500
            # and 800 and 900 in the unit 'channel'
            c)
            mask only channel 400
            msk = scan.create_mask([400, 400])
        """
        row = 0
        if kwargs.has_key("row"):
            row = kwargs.get("row")
        data = self._getabcissa(row)
        # current abcissa unit (empty string means 'channel')
        u = self._getcoordinfo()[0]
        if rcParams['verbose']:
            if u == "": u = "channel"
            msg = "The current mask window unit is %s" % u
            i = self._check_ifs()
            if not i:
                msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
            asaplog.push(msg)
        n = self.nchan()
        # start with an all-False mask and switch windows on below
        msk = _n_bools(n, False)
        # test if args is a 'list' or a 'normal *args - UGLY!!!

        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
             and args or args[0]
        for window in ws:
            if (len(window) != 2 or window[0] > window[1] ):
                raise TypeError("A window needs to be defined as [min, max]")
            for i in range(n):
                # both window edges are inclusive
                if data[i] >= window[0] and data[i] <= window[1]:
                    msk[i] = True
        if kwargs.has_key('invert'):
            if kwargs.get('invert'):
                msk = mask_not(msk)
        print_log()
        return msk
842
843 def get_masklist(self, mask=None, row=0):
844 """
845 Compute and return a list of mask windows, [min, max].
846 Parameters:
847 mask: channel mask, created with create_mask.
848 row: calcutate the masklist using the specified row
849 for unit conversions, default is row=0
850 only necessary if frequency varies over rows.
851 Returns:
852 [min, max], [min2, max2], ...
853 Pairs of start/end points (inclusive)specifying
854 the masked regions
855 """
856 if not (isinstance(mask,list) or isinstance(mask, tuple)):
857 raise TypeError("The mask should be list or tuple.")
858 if len(mask) < 2:
859 raise TypeError("The mask elements should be > 1")
860 if self.nchan() != len(mask):
861 msg = "Number of channels in scantable != number of mask elements"
862 raise TypeError(msg)
863 data = self._getabcissa(row)
864 u = self._getcoordinfo()[0]
865 if rcParams['verbose']:
866 if u == "": u = "channel"
867 msg = "The current mask window unit is %s" % u
868 i = self._check_ifs()
869 if not i:
870 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
871 asaplog.push(msg)
872 masklist=[]
873 ist, ien = None, None
874 ist, ien=self.get_mask_indices(mask)
875 if ist is not None and ien is not None:
876 for i in xrange(len(ist)):
877 range=[data[ist[i]],data[ien[i]]]
878 range.sort()
879 masklist.append([range[0],range[1]])
880 return masklist
881
882 def get_mask_indices(self, mask=None):
883 """
884 Compute and Return lists of mask start indices and mask end indices.
885 Parameters:
886 mask: channel mask, created with create_mask.
887 Returns:
888 List of mask start indices and that of mask end indices,
889 i.e., [istart1,istart2,....], [iend1,iend2,....].
890 """
891 if not (isinstance(mask,list) or isinstance(mask, tuple)):
892 raise TypeError("The mask should be list or tuple.")
893 if len(mask) < 2:
894 raise TypeError("The mask elements should be > 1")
895 istart=[]
896 iend=[]
897 if mask[0]: istart.append(0)
898 for i in range(len(mask)-1):
899 if not mask[i] and mask[i+1]:
900 istart.append(i+1)
901 elif mask[i] and not mask[i+1]:
902 iend.append(i)
903 if mask[len(mask)-1]: iend.append(len(mask)-1)
904 if len(istart) != len(iend):
905 raise RuntimeError("Numbers of mask start != mask end.")
906 for i in range(len(istart)):
907 if istart[i] > iend[i]:
908 raise RuntimeError("Mask start index > mask end index")
909 break
910 return istart,iend
911
912# def get_restfreqs(self):
913# """
914# Get the restfrequency(s) stored in this scantable.
915# The return value(s) are always of unit 'Hz'
916# Parameters:
917# none
918# Returns:
919# a list of doubles
920# """
921# return list(self._getrestfreqs())
922
923 def get_restfreqs(self, ids=None):
924 """
925 Get the restfrequency(s) stored in this scantable.
926 The return value(s) are always of unit 'Hz'
927 Parameters:
928 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
929 be retrieved
930 Returns:
931 dictionary containing ids and a list of doubles for each id
932 """
933 if ids is None:
934 rfreqs={}
935 idlist = self.getmolnos()
936 for i in idlist:
937 rfreqs[i]=list(self._getrestfreqs(i))
938 return rfreqs
939 else:
940 if type(ids)==list or type(ids)==tuple:
941 rfreqs={}
942 for i in ids:
943 rfreqs[i]=list(self._getrestfreqs(i))
944 return rfreqs
945 else:
946 return list(self._getrestfreqs(ids))
947 #return list(self._getrestfreqs(ids))
948
949 def set_restfreqs(self, freqs=None, unit='Hz'):
950 """
951 ********NEED TO BE UPDATED begin************
952 Set or replace the restfrequency specified and
953 If the 'freqs' argument holds a scalar,
954 then that rest frequency will be applied to all the selected
955 data. If the 'freqs' argument holds
956 a vector, then it MUST be of equal or smaller length than
957 the number of IFs (and the available restfrequencies will be
958 replaced by this vector). In this case, *all* data have
959 the restfrequency set per IF according
960 to the corresponding value you give in the 'freqs' vector.
961 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
962 IF 1 gets restfreq 2e9.
963 ********NEED TO BE UPDATED end************
964 You can also specify the frequencies via a linecatalog/
965
966 Parameters:
967 freqs: list of rest frequency values or string idenitfiers
968 unit: unit for rest frequency (default 'Hz')
969
970 Example:
971 # set the given restfrequency for the all currently selected IFs
972 scan.set_restfreqs(freqs=1.4e9)
973 # set multiple restfrequencies to all the selected data
974 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
975 # If the number of IFs in the data is >= 2 the IF0 gets the first
976 # value IF1 the second... NOTE that freqs needs to be
977 # specified in list of list (e.g. [[],[],...] ).
978 scan.set_restfreqs(freqs=[[1.4e9],[1.67e9]])
979 #set the given restfrequency for the whole table (by name)
980 scan.set_restfreqs(freqs="OH1667")
981
982 Note:
983 To do more sophisticate Restfrequency setting, e.g. on a
984 source and IF basis, use scantable.set_selection() before using
985 this function.
986 # provide your scantable is call scan
987 selection = selector()
988 selection.set_name("ORION*")
989 selection.set_ifs([1])
990 scan.set_selection(selection)
991 scan.set_restfreqs(freqs=86.6e9)
992
993 """
994 varlist = vars()
995 from asap import linecatalog
996 # simple value
997 if isinstance(freqs, int) or isinstance(freqs, float):
998 # TT mod
999 #self._setrestfreqs(freqs, "",unit)
1000 self._setrestfreqs([freqs], [""],unit)
1001 # list of values
1002 elif isinstance(freqs, list) or isinstance(freqs, tuple):
1003 # list values are scalars
1004 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1005 self._setrestfreqs(freqs, [""],unit)
1006 # list values are tuples, (value, name)
1007 elif isinstance(freqs[-1], dict):
1008 #sel = selector()
1009 #savesel = self._getselection()
1010 #iflist = self.getifnos()
1011 #for i in xrange(len(freqs)):
1012 # sel.set_ifs(iflist[i])
1013 # self._setselection(sel)
1014 # self._setrestfreqs(freqs[i], "",unit)
1015 #self._setselection(savesel)
1016 self._setrestfreqs(freqs["value"],
1017 freqs["name"], "MHz")
1018 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1019 sel = selector()
1020 savesel = self._getselection()
1021 iflist = self.getifnos()
1022 if len(freqs)>len(iflist):
1023 raise ValueError("number of elements in list of list exeeds the current IF selections")
1024 for i in xrange(len(freqs)):
1025 sel.set_ifs(iflist[i])
1026 self._setselection(sel)
1027 self._setrestfreqs(freqs[i]["value"],
1028 freqs[i]["name"], "MHz")
1029 self._setselection(savesel)
1030 # freqs are to be taken from a linecatalog
1031 elif isinstance(freqs, linecatalog):
1032 sel = selector()
1033 savesel = self._getselection()
1034 for i in xrange(freqs.nrow()):
1035 sel.set_ifs(iflist[i])
1036 self._setselection(sel)
1037 self._setrestfreqs(freqs.get_frequency(i),
1038 freqs.get_name(i), "MHz")
1039 # ensure that we are not iterating past nIF
1040 if i == self.nif()-1: break
1041 self._setselection(savesel)
1042 else:
1043 return
1044 self._add_history("set_restfreqs", varlist)
1045
1046 def shift_refpix(self, delta):
1047 """
1048 Shift the reference pixel of the Spectra Coordinate by an
1049 integer amount.
1050 Parameters:
1051 delta: the amount to shift by
1052 Note:
1053 Be careful using this with broadband data.
1054 """
1055 Scantable.shift(self, delta)
1056
1057 def history(self, filename=None):
1058 """
1059 Print the history. Optionally to a file.
1060 Parameters:
1061 filename: The name of the file to save the history to.
1062 """
1063 hist = list(self._gethistory())
1064 out = "-"*80
1065 for h in hist:
1066 if h.startswith("---"):
1067 out += "\n"+h
1068 else:
1069 items = h.split("##")
1070 date = items[0]
1071 func = items[1]
1072 items = items[2:]
1073 out += "\n"+date+"\n"
1074 out += "Function: %s\n Parameters:" % (func)
1075 for i in items:
1076 s = i.split("=")
1077 out += "\n %s = %s" % (s[0], s[1])
1078 out += "\n"+"-"*80
1079 if filename is not None:
1080 if filename is "":
1081 filename = 'scantable_history.txt'
1082 import os
1083 filename = os.path.expandvars(os.path.expanduser(filename))
1084 if not os.path.isdir(filename):
1085 data = open(filename, 'w')
1086 data.write(out)
1087 data.close()
1088 else:
1089 msg = "Illegal file name '%s'." % (filename)
1090 if rcParams['verbose']:
1091 print msg
1092 else:
1093 raise IOError(msg)
1094 if rcParams['verbose']:
1095 try:
1096 from IPython.genutils import page as pager
1097 except ImportError:
1098 from pydoc import pager
1099 pager(out)
1100 else:
1101 return out
1102 return
1103 #
1104 # Maths business
1105 #
1106
    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
        """
        Return the (time) weighted average of a scan.
        Note:
            in channels only - align if necessary
        Parameters:
            mask:     an optional mask (only used for 'var' and 'tsys'
                      weighting)
            scanav:   True averages each scan separately
                      False (default) averages all scans together,
            weight:   Weighting scheme.
                      'none'     (mean no weight)
                      'var'      (1/var(spec) weighted)
                      'tsys'     (1/Tsys**2 weighted)
                      'tint'     (integration time weighted)
                      'tintsys'  (Tint/Tsys**2)
                      'median'   ( median averaging)
                      The default is 'tint'
            align:    align the spectra in velocity before averaging. It takes
                      the time of the first spectrum as reference time.
        Example:
            # time average the scantable without using a mask
            newscan = scan.average_time()
        """
        varlist = vars()
        # normalise the arguments to what the C++ layer expects
        if weight is None: weight = 'TINT'
        if mask is None: mask = ()
        if scanav: scanav = "SCAN"
        else: scanav = "NONE"
        # the averager takes a tuple of scantables
        scan = (self, )
        try:
            if align:
                # align on a copy so 'self' is left untouched
                scan = (self.freq_align(insitu=False), )
            s = None
            if weight.upper() == 'MEDIAN':
                # median averaging uses a dedicated channel-wise routine
                # and ignores the mask
                s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
                                                         scanav))
            else:
                s = scantable(self._math._average(scan, mask, weight.upper(),
                                                  scanav))
        except RuntimeError, msg:
            # in verbose mode report and swallow the error; otherwise
            # propagate it to the caller
            if rcParams['verbose']:
                print msg
                return
            else: raise
        s._add_history("average_time", varlist)
        print_log()
        return s
1155
1156 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1157 """
1158 Return a scan where all spectra are converted to either
1159 Jansky or Kelvin depending upon the flux units of the scan table.
1160 By default the function tries to look the values up internally.
1161 If it can't find them (or if you want to over-ride), you must
1162 specify EITHER jyperk OR eta (and D which it will try to look up
1163 also if you don't set it). jyperk takes precedence if you set both.
1164 Parameters:
1165 jyperk: the Jy / K conversion factor
1166 eta: the aperture efficiency
1167 d: the geomtric diameter (metres)
1168 insitu: if False a new scantable is returned.
1169 Otherwise, the scaling is done in-situ
1170 The default is taken from .asaprc (False)
1171 """
1172 if insitu is None: insitu = rcParams['insitu']
1173 self._math._setinsitu(insitu)
1174 varlist = vars()
1175 if jyperk is None: jyperk = -1.0
1176 if d is None: d = -1.0
1177 if eta is None: eta = -1.0
1178 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1179 s._add_history("convert_flux", varlist)
1180 print_log()
1181 if insitu: self._assign(s)
1182 else: return s
1183
1184 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1185 """
1186 Return a scan after applying a gain-elevation correction.
1187 The correction can be made via either a polynomial or a
1188 table-based interpolation (and extrapolation if necessary).
1189 You specify polynomial coefficients, an ascii table or neither.
1190 If you specify neither, then a polynomial correction will be made
1191 with built in coefficients known for certain telescopes (an error
1192 will occur if the instrument is not known).
1193 The data and Tsys are *divided* by the scaling factors.
1194 Parameters:
1195 poly: Polynomial coefficients (default None) to compute a
1196 gain-elevation correction as a function of
1197 elevation (in degrees).
1198 filename: The name of an ascii file holding correction factors.
1199 The first row of the ascii file must give the column
1200 names and these MUST include columns
1201 "ELEVATION" (degrees) and "FACTOR" (multiply data
1202 by this) somewhere.
1203 The second row must give the data type of the
1204 column. Use 'R' for Real and 'I' for Integer.
1205 An example file would be
1206 (actual factors are arbitrary) :
1207
1208 TIME ELEVATION FACTOR
1209 R R R
1210 0.1 0 0.8
1211 0.2 20 0.85
1212 0.3 40 0.9
1213 0.4 60 0.85
1214 0.5 80 0.8
1215 0.6 90 0.75
1216 method: Interpolation method when correcting from a table.
1217 Values are "nearest", "linear" (default), "cubic"
1218 and "spline"
1219 insitu: if False a new scantable is returned.
1220 Otherwise, the scaling is done in-situ
1221 The default is taken from .asaprc (False)
1222 """
1223
1224 if insitu is None: insitu = rcParams['insitu']
1225 self._math._setinsitu(insitu)
1226 varlist = vars()
1227 if poly is None:
1228 poly = ()
1229 from os.path import expandvars
1230 filename = expandvars(filename)
1231 s = scantable(self._math._gainel(self, poly, filename, method))
1232 s._add_history("gain_el", varlist)
1233 print_log()
1234 if insitu: self._assign(s)
1235 else: return s
1236
1237 def freq_align(self, reftime=None, method='cubic', insitu=None):
1238 """
1239 Return a scan where all rows have been aligned in frequency/velocity.
1240 The alignment frequency frame (e.g. LSRK) is that set by function
1241 set_freqframe.
1242 Parameters:
1243 reftime: reference time to align at. By default, the time of
1244 the first row of data is used.
1245 method: Interpolation method for regridding the spectra.
1246 Choose from "nearest", "linear", "cubic" (default)
1247 and "spline"
1248 insitu: if False a new scantable is returned.
1249 Otherwise, the scaling is done in-situ
1250 The default is taken from .asaprc (False)
1251 """
1252 if insitu is None: insitu = rcParams["insitu"]
1253 self._math._setinsitu(insitu)
1254 varlist = vars()
1255 if reftime is None: reftime = ""
1256 s = scantable(self._math._freq_align(self, reftime, method))
1257 s._add_history("freq_align", varlist)
1258 print_log()
1259 if insitu: self._assign(s)
1260 else: return s
1261
1262 def opacity(self, tau, insitu=None):
1263 """
1264 Apply an opacity correction. The data
1265 and Tsys are multiplied by the correction factor.
1266 Parameters:
1267 tau: Opacity from which the correction factor is
1268 exp(tau*ZD)
1269 where ZD is the zenith-distance
1270 insitu: if False a new scantable is returned.
1271 Otherwise, the scaling is done in-situ
1272 The default is taken from .asaprc (False)
1273 """
1274 if insitu is None: insitu = rcParams['insitu']
1275 self._math._setinsitu(insitu)
1276 varlist = vars()
1277 s = scantable(self._math._opacity(self, tau))
1278 s._add_history("opacity", varlist)
1279 print_log()
1280 if insitu: self._assign(s)
1281 else: return s
1282
1283 def bin(self, width=5, insitu=None):
1284 """
1285 Return a scan where all spectra have been binned up.
1286 Parameters:
1287 width: The bin width (default=5) in pixels
1288 insitu: if False a new scantable is returned.
1289 Otherwise, the scaling is done in-situ
1290 The default is taken from .asaprc (False)
1291 """
1292 if insitu is None: insitu = rcParams['insitu']
1293 self._math._setinsitu(insitu)
1294 varlist = vars()
1295 s = scantable(self._math._bin(self, width))
1296 s._add_history("bin", varlist)
1297 print_log()
1298 if insitu: self._assign(s)
1299 else: return s
1300
1301
1302 def resample(self, width=5, method='cubic', insitu=None):
1303 """
1304 Return a scan where all spectra have been binned up.
1305
1306 Parameters:
1307 width: The bin width (default=5) in pixels
1308 method: Interpolation method when correcting from a table.
1309 Values are "nearest", "linear", "cubic" (default)
1310 and "spline"
1311 insitu: if False a new scantable is returned.
1312 Otherwise, the scaling is done in-situ
1313 The default is taken from .asaprc (False)
1314 """
1315 if insitu is None: insitu = rcParams['insitu']
1316 self._math._setinsitu(insitu)
1317 varlist = vars()
1318 s = scantable(self._math._resample(self, method, width))
1319 s._add_history("resample", varlist)
1320 print_log()
1321 if insitu: self._assign(s)
1322 else: return s
1323
1324
1325 def average_pol(self, mask=None, weight='none'):
1326 """
1327 Average the Polarisations together.
1328 Parameters:
1329 mask: An optional mask defining the region, where the
1330 averaging will be applied. The output will have all
1331 specified points masked.
1332 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1333 weighted), or 'tsys' (1/Tsys**2 weighted)
1334 """
1335 varlist = vars()
1336 if mask is None:
1337 mask = ()
1338 s = scantable(self._math._averagepol(self, mask, weight.upper()))
1339 s._add_history("average_pol", varlist)
1340 print_log()
1341 return s
1342
1343 def average_beam(self, mask=None, weight='none'):
1344 """
1345 Average the Beams together.
1346 Parameters:
1347 mask: An optional mask defining the region, where the
1348 averaging will be applied. The output will have all
1349 specified points masked.
1350 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1351 weighted), or 'tsys' (1/Tsys**2 weighted)
1352 """
1353 varlist = vars()
1354 if mask is None:
1355 mask = ()
1356 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1357 s._add_history("average_beam", varlist)
1358 print_log()
1359 return s
1360
1361 def convert_pol(self, poltype=None):
1362 """
1363 Convert the data to a different polarisation type.
1364 Parameters:
1365 poltype: The new polarisation type. Valid types are:
1366 "linear", "stokes" and "circular"
1367 """
1368 varlist = vars()
1369 try:
1370 s = scantable(self._math._convertpol(self, poltype))
1371 except RuntimeError, msg:
1372 if rcParams['verbose']:
1373 print msg
1374 return
1375 else:
1376 raise
1377 s._add_history("convert_pol", varlist)
1378 print_log()
1379 return s
1380
1381 def smooth(self, kernel="hanning", width=5.0, insitu=None):
1382 """
1383 Smooth the spectrum by the specified kernel (conserving flux).
1384 Parameters:
1385 kernel: The type of smoothing kernel. Select from
1386 'hanning' (default), 'gaussian', 'boxcar' and
1387 'rmedian'
1388 width: The width of the kernel in pixels. For hanning this is
1389 ignored otherwise it defauls to 5 pixels.
1390 For 'gaussian' it is the Full Width Half
1391 Maximum. For 'boxcar' it is the full width.
1392 For 'rmedian' it is the half width.
1393 insitu: if False a new scantable is returned.
1394 Otherwise, the scaling is done in-situ
1395 The default is taken from .asaprc (False)
1396 Example:
1397 none
1398 """
1399 if insitu is None: insitu = rcParams['insitu']
1400 self._math._setinsitu(insitu)
1401 varlist = vars()
1402 s = scantable(self._math._smooth(self, kernel.lower(), width))
1403 s._add_history("smooth", varlist)
1404 print_log()
1405 if insitu: self._assign(s)
1406 else: return s
1407
1408
1409 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
1410 """
1411 Return a scan which has been baselined (all rows) by a polynomial.
1412 Parameters:
1413 mask: an optional mask
1414 order: the order of the polynomial (default is 0)
1415 plot: plot the fit and the residual. In this each
1416 indivual fit has to be approved, by typing 'y'
1417 or 'n'
1418 uselin: use linear polynomial fit
1419 insitu: if False a new scantable is returned.
1420 Otherwise, the scaling is done in-situ
1421 The default is taken from .asaprc (False)
1422 Example:
1423 # return a scan baselined by a third order polynomial,
1424 # not using a mask
1425 bscan = scan.poly_baseline(order=3)
1426 """
1427 if insitu is None: insitu = rcParams['insitu']
1428 varlist = vars()
1429 if mask is None:
1430 mask = [True for i in xrange(self.nchan(-1))]
1431 from asap.asapfitter import fitter
1432 try:
1433 f = fitter()
1434 f.set_scan(self, mask)
1435 if uselin:
1436 f.set_function(lpoly=order)
1437 else:
1438 f.set_function(poly=order)
1439 s = f.auto_fit(insitu, plot=plot)
1440 # Save parameters of baseline fits as a class attribute.
1441 # NOTICE: It does not reflect changes in scantable!
1442 self.blpars = f.blpars
1443 s._add_history("poly_baseline", varlist)
1444 print_log()
1445 if insitu: self._assign(s)
1446 else: return s
1447 except RuntimeError:
1448 msg = "The fit failed, possibly because it didn't converge."
1449 if rcParams['verbose']:
1450 print msg
1451 return
1452 else:
1453 raise RuntimeError(msg)
1454
1455
1456 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1457 threshold=3, chan_avg_limit=1, plot=False,
1458 insitu=None):
1459 """
1460 Return a scan which has been baselined (all rows) by a polynomial.
1461 Spectral lines are detected first using linefinder and masked out
1462 to avoid them affecting the baseline solution.
1463
1464 Parameters:
1465 mask: an optional mask retreived from scantable
1466 edge: an optional number of channel to drop at
1467 the edge of spectrum. If only one value is
1468 specified, the same number will be dropped from
1469 both sides of the spectrum. Default is to keep
1470 all channels. Nested tuples represent individual
1471 edge selection for different IFs (a number of spectral
1472 channels can be different)
1473 order: the order of the polynomial (default is 0)
1474 threshold: the threshold used by line finder. It is better to
1475 keep it large as only strong lines affect the
1476 baseline solution.
1477 chan_avg_limit:
1478 a maximum number of consequtive spectral channels to
1479 average during the search of weak and broad lines.
1480 The default is no averaging (and no search for weak
1481 lines). If such lines can affect the fitted baseline
1482 (e.g. a high order polynomial is fitted), increase this
1483 parameter (usually values up to 8 are reasonable). Most
1484 users of this method should find the default value
1485 sufficient.
1486 plot: plot the fit and the residual. In this each
1487 indivual fit has to be approved, by typing 'y'
1488 or 'n'
1489 insitu: if False a new scantable is returned.
1490 Otherwise, the scaling is done in-situ
1491 The default is taken from .asaprc (False)
1492
1493 Example:
1494 scan2=scan.auto_poly_baseline(order=7)
1495 """
1496 if insitu is None: insitu = rcParams['insitu']
1497 varlist = vars()
1498 from asap.asapfitter import fitter
1499 from asap.asaplinefind import linefinder
1500 from asap import _is_sequence_or_number as _is_valid
1501
1502 # check whether edge is set up for each IF individually
1503 individualedge = False;
1504 if len(edge) > 1:
1505 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1506 individualedge = True;
1507
1508 if not _is_valid(edge, int) and not individualedge:
1509 raise ValueError, "Parameter 'edge' has to be an integer or a \
1510 pair of integers specified as a tuple. Nested tuples are allowed \
1511 to make individual selection for different IFs."
1512
1513 curedge = (0, 0)
1514 if individualedge:
1515 for edgepar in edge:
1516 if not _is_valid(edgepar, int):
1517 raise ValueError, "Each element of the 'edge' tuple has \
1518 to be a pair of integers or an integer."
1519 else:
1520 curedge = edge;
1521
1522 # setup fitter
1523 f = fitter()
1524 f.set_function(poly=order)
1525
1526 # setup line finder
1527 fl = linefinder()
1528 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
1529
1530 if not insitu:
1531 workscan = self.copy()
1532 else:
1533 workscan = self
1534
1535 fl.set_scan(workscan)
1536
1537 rows = range(workscan.nrow())
1538 # Save parameters of baseline fits & masklists as a class attribute.
1539 # NOTICE: It does not reflect changes in scantable!
1540 if len(rows) > 0:
1541 self.blpars=[]
1542 self.masklists=[]
1543 asaplog.push("Processing:")
1544 for r in rows:
1545 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1546 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1547 workscan.getpol(r), workscan.getcycle(r))
1548 asaplog.push(msg, False)
1549
1550 # figure out edge parameter
1551 if individualedge:
1552 if len(edge) >= workscan.getif(r):
1553 raise RuntimeError, "Number of edge elements appear to " \
1554 "be less than the number of IFs"
1555 curedge = edge[workscan.getif(r)]
1556
1557 # setup line finder
1558 fl.find_lines(r, mask, curedge)
1559 outmask=fl.get_mask()
1560 f.set_scan(workscan, fl.get_mask())
1561 f.x = workscan._getabcissa(r)
1562 f.y = workscan._getspectrum(r)
1563 f.data = None
1564 f.fit()
1565
1566 # Show mask list
1567 masklist=workscan.get_masklist(fl.get_mask(),row=r)
1568 msg = "mask range: "+str(masklist)
1569 asaplog.push(msg, False)
1570
1571 fpar = f.get_parameters()
1572 if plot:
1573 f.plot(residual=True)
1574 x = raw_input("Accept fit ( [y]/n ): ")
1575 if x.upper() == 'N':
1576 self.blpars.append(None)
1577 self.masklists.append(None)
1578 continue
1579 workscan._setspectrum(f.fitter.getresidual(), r)
1580 self.blpars.append(fpar)
1581 self.masklists.append(masklist)
1582 if plot:
1583 f._p.unmap()
1584 f._p = None
1585 workscan._add_history("auto_poly_baseline", varlist)
1586 if insitu:
1587 self._assign(workscan)
1588 else:
1589 return workscan
1590
1591 def rotate_linpolphase(self, angle):
1592 """
1593 Rotate the phase of the complex polarization O=Q+iU correlation.
1594 This is always done in situ in the raw data. So if you call this
1595 function more than once then each call rotates the phase further.
1596 Parameters:
1597 angle: The angle (degrees) to rotate (add) by.
1598 Examples:
1599 scan.rotate_linpolphase(2.3)
1600 """
1601 varlist = vars()
1602 self._math._rotate_linpolphase(self, angle)
1603 self._add_history("rotate_linpolphase", varlist)
1604 print_log()
1605 return
1606
1607
1608 def rotate_xyphase(self, angle):
1609 """
1610 Rotate the phase of the XY correlation. This is always done in situ
1611 in the data. So if you call this function more than once
1612 then each call rotates the phase further.
1613 Parameters:
1614 angle: The angle (degrees) to rotate (add) by.
1615 Examples:
1616 scan.rotate_xyphase(2.3)
1617 """
1618 varlist = vars()
1619 self._math._rotate_xyphase(self, angle)
1620 self._add_history("rotate_xyphase", varlist)
1621 print_log()
1622 return
1623
1624 def swap_linears(self):
1625 """
1626 Swap the linear polarisations XX and YY, or better the first two
1627 polarisations as this also works for ciculars.
1628 """
1629 varlist = vars()
1630 self._math._swap_linears(self)
1631 self._add_history("swap_linears", varlist)
1632 print_log()
1633 return
1634
1635 def invert_phase(self):
1636 """
1637 Invert the phase of the complex polarisation
1638 """
1639 varlist = vars()
1640 self._math._invert_phase(self)
1641 self._add_history("invert_phase", varlist)
1642 print_log()
1643 return
1644
1645 def add(self, offset, insitu=None):
1646 """
1647 Return a scan where all spectra have the offset added
1648 Parameters:
1649 offset: the offset
1650 insitu: if False a new scantable is returned.
1651 Otherwise, the scaling is done in-situ
1652 The default is taken from .asaprc (False)
1653 """
1654 if insitu is None: insitu = rcParams['insitu']
1655 self._math._setinsitu(insitu)
1656 varlist = vars()
1657 s = scantable(self._math._unaryop(self, offset, "ADD", False))
1658 s._add_history("add", varlist)
1659 print_log()
1660 if insitu:
1661 self._assign(s)
1662 else:
1663 return s
1664
1665 def scale(self, factor, tsys=True, insitu=None):
1666 """
1667 Return a scan where all spectra are scaled by the give 'factor'
1668 Parameters:
1669 factor: the scaling factor
1670 insitu: if False a new scantable is returned.
1671 Otherwise, the scaling is done in-situ
1672 The default is taken from .asaprc (False)
1673 tsys: if True (default) then apply the operation to Tsys
1674 as well as the data
1675 """
1676 if insitu is None: insitu = rcParams['insitu']
1677 self._math._setinsitu(insitu)
1678 varlist = vars()
1679 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1680 s._add_history("scale", varlist)
1681 print_log()
1682 if insitu:
1683 self._assign(s)
1684 else:
1685 return s
1686
    def auto_quotient(self, preserve=True, mode='paired'):
        """
        This function allows to build quotients automatically.
        It assumes the observation to have the same number of
        "ons" and "offs".
        Parameters:
            preserve: you can preserve (default) the continuum or
                      remove it.  The equations used are
                      preserve: Output = Toff * (on/off) - Toff
                      remove:   Output = Toff * (on/off) - Ton
            mode:     the on/off detection mode
                      'paired' (default)
                      identifies 'off' scans by the
                      trailing '_R' (Mopra/Parkes) or
                      '_e'/'_w' (Tid) and matches
                      on/off pairs from the observing pattern
                      'time'
                      finds the closest off in time

        """
        modes = ["time", "paired"]
        if not mode in modes:
            msg = "please provide valid mode. Valid modes are %s" % (modes)
            raise ValueError(msg)
        varlist = vars()
        s = None
        if mode.lower() == "paired":
            # Temporarily narrow the current selection to pick out the
            # 'off' (SRCTYPE==1) and 'on' (SRCTYPE==0) scans in turn,
            # then restore the caller's selection afterwards.
            basesel = self.get_selection()
            sel = selector()+basesel
            sel.set_query("SRCTYPE==1")
            self.set_selection(sel)
            offs = self.copy()
            sel.set_query("SRCTYPE==0")
            self.set_selection(sel)
            ons = self.copy()
            s = scantable(self._math._quotient(ons, offs, preserve))
            # restore the selection the caller had in place
            self.set_selection(basesel)
        elif mode.lower() == "time":
            s = scantable(self._math._auto_quotient(self, mode, preserve))
        s._add_history("auto_quotient", varlist)
        print_log()
        return s
1729
1730 def mx_quotient(self, mask = None, weight='median', preserve=True):
1731 """
1732 Form a quotient using "off" beams when observing in "MX" mode.
1733 Parameters:
1734 mask: an optional mask to be used when weight == 'stddev'
1735 weight: How to average the off beams. Default is 'median'.
1736 preserve: you can preserve (default) the continuum or
1737 remove it. The equations used are
1738 preserve: Output = Toff * (on/off) - Toff
1739 remove: Output = Toff * (on/off) - Ton
1740 """
1741 if mask is None: mask = ()
1742 varlist = vars()
1743 on = scantable(self._math._mx_extract(self, 'on'))
1744 preoff = scantable(self._math._mx_extract(self, 'off'))
1745 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1746 from asapmath import quotient
1747 q = quotient(on, off, preserve)
1748 q._add_history("mx_quotient", varlist)
1749 print_log()
1750 return q
1751
1752 def freq_switch(self, insitu=None):
1753 """
1754 Apply frequency switching to the data.
1755 Parameters:
1756 insitu: if False a new scantable is returned.
1757 Otherwise, the swictching is done in-situ
1758 The default is taken from .asaprc (False)
1759 Example:
1760 none
1761 """
1762 if insitu is None: insitu = rcParams['insitu']
1763 self._math._setinsitu(insitu)
1764 varlist = vars()
1765 s = scantable(self._math._freqswitch(self))
1766 s._add_history("freq_switch", varlist)
1767 print_log()
1768 if insitu: self._assign(s)
1769 else: return s
1770
1771 def recalc_azel(self):
1772 """
1773 Recalculate the azimuth and elevation for each position.
1774 Parameters:
1775 none
1776 Example:
1777 """
1778 varlist = vars()
1779 self._recalcazel()
1780 self._add_history("recalc_azel", varlist)
1781 print_log()
1782 return
1783
1784 def __add__(self, other):
1785 varlist = vars()
1786 s = None
1787 if isinstance(other, scantable):
1788 s = scantable(self._math._binaryop(self, other, "ADD"))
1789 elif isinstance(other, float):
1790 s = scantable(self._math._unaryop(self, other, "ADD", False))
1791 else:
1792 raise TypeError("Other input is not a scantable or float value")
1793 s._add_history("operator +", varlist)
1794 print_log()
1795 return s
1796
1797 def __sub__(self, other):
1798 """
1799 implicit on all axes and on Tsys
1800 """
1801 varlist = vars()
1802 s = None
1803 if isinstance(other, scantable):
1804 s = scantable(self._math._binaryop(self, other, "SUB"))
1805 elif isinstance(other, float):
1806 s = scantable(self._math._unaryop(self, other, "SUB", False))
1807 else:
1808 raise TypeError("Other input is not a scantable or float value")
1809 s._add_history("operator -", varlist)
1810 print_log()
1811 return s
1812
1813 def __mul__(self, other):
1814 """
1815 implicit on all axes and on Tsys
1816 """
1817 varlist = vars()
1818 s = None
1819 if isinstance(other, scantable):
1820 s = scantable(self._math._binaryop(self, other, "MUL"))
1821 elif isinstance(other, float):
1822 s = scantable(self._math._unaryop(self, other, "MUL", False))
1823 else:
1824 raise TypeError("Other input is not a scantable or float value")
1825 s._add_history("operator *", varlist)
1826 print_log()
1827 return s
1828
1829
1830 def __div__(self, other):
1831 """
1832 implicit on all axes and on Tsys
1833 """
1834 varlist = vars()
1835 s = None
1836 if isinstance(other, scantable):
1837 s = scantable(self._math._binaryop(self, other, "DIV"))
1838 elif isinstance(other, float):
1839 if other == 0.0:
1840 raise ZeroDivisionError("Dividing by zero is not recommended")
1841 s = scantable(self._math._unaryop(self, other, "DIV", False))
1842 else:
1843 raise TypeError("Other input is not a scantable or float value")
1844 s._add_history("operator /", varlist)
1845 print_log()
1846 return s
1847
1848 def get_fit(self, row=0):
1849 """
1850 Print or return the stored fits for a row in the scantable
1851 Parameters:
1852 row: the row which the fit has been applied to.
1853 """
1854 if row > self.nrow():
1855 return
1856 from asap.asapfit import asapfit
1857 fit = asapfit(self._getfit(row))
1858 if rcParams['verbose']:
1859 print fit
1860 return
1861 else:
1862 return fit.as_dict()
1863
    def _add_history(self, funcname, parameters):
        """
        Record a function call in the scantable history.
        Parameters:
            funcname:    the name of the function being recorded
            parameters:  a dict of the call's local variables (usually
                         the result of vars()); the 'self' entry is
                         removed from the dict in place
        The entry format is 'date##funcname##key=value##key=value...',
        which history() parses back by splitting on '##'.
        """
        # create date
        sep = "##"
        from datetime import datetime
        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
        hist = dstr+sep
        hist += funcname+sep#cdate+sep
        # drop the scantable itself from the recorded parameters
        if parameters.has_key('self'): del parameters['self']
        for k, v in parameters.iteritems():
            if type(v) is dict:
                # a dict-valued parameter is flattened into its items
                for k2, v2 in v.iteritems():
                    hist += k2
                    hist += "="
                    if isinstance(v2, scantable):
                        # don't serialise whole tables into the history
                        hist += 'scantable'
                    elif k2 == 'mask':
                        # store masks compactly as [start, end) segments
                        if isinstance(v2, list) or isinstance(v2, tuple):
                            hist += str(self._zip_mask(v2))
                        else:
                            hist += str(v2)
                    else:
                        hist += str(v2)
            else:
                hist += k
                hist += "="
                if isinstance(v, scantable):
                    hist += 'scantable'
                elif k == 'mask':
                    if isinstance(v, list) or isinstance(v, tuple):
                        hist += str(self._zip_mask(v))
                    else:
                        hist += str(v)
                else:
                    hist += str(v)
            hist += sep
        hist = hist[:-2] # remove trailing '##'
        self._addhistory(hist)
1901
1902
1903 def _zip_mask(self, mask):
1904 mask = list(mask)
1905 i = 0
1906 segments = []
1907 while mask[i:].count(1):
1908 i += mask[i:].index(1)
1909 if mask[i:].count(0):
1910 j = i + mask[i:].index(0)
1911 else:
1912 j = len(mask)
1913 segments.append([i, j])
1914 i = j
1915 return segments
1916
1917 def _get_ordinate_label(self):
1918 fu = "("+self.get_fluxunit()+")"
1919 import re
1920 lbl = "Intensity"
1921 if re.match(".K.", fu):
1922 lbl = "Brightness Temperature "+ fu
1923 elif re.match(".Jy.", fu):
1924 lbl = "Flux density "+ fu
1925 return lbl
1926
1927 def _check_ifs(self):
1928 nchans = [self.nchan(i) for i in range(self.nif(-1))]
1929 nchans = filter(lambda t: t > 0, nchans)
1930 return (sum(nchans)/len(nchans) == nchans[0])
1931
1932 def _fill(self, names, unit, average, getpt):
1933 import os
1934 from asap._asap import stfiller
1935 first = True
1936 fullnames = []
1937 for name in names:
1938 name = os.path.expandvars(name)
1939 name = os.path.expanduser(name)
1940 if not os.path.exists(name):
1941 msg = "File '%s' does not exists" % (name)
1942 if rcParams['verbose']:
1943 asaplog.push(msg)
1944 print asaplog.pop().strip()
1945 return
1946 raise IOError(msg)
1947 fullnames.append(name)
1948 if average:
1949 asaplog.push('Auto averaging integrations')
1950 stype = int(rcParams['scantable.storage'].lower() == 'disk')
1951 for name in fullnames:
1952 tbl = Scantable(stype)
1953 r = stfiller(tbl)
1954 msg = "Importing %s..." % (name)
1955 asaplog.push(msg, False)
1956 print_log()
1957 r._open(name, -1, -1, getpt)
1958 r._read()
1959 #tbl = r._getdata()
1960 if average:
1961 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
1962 #tbl = tbl2
1963 if not first:
1964 tbl = self._math._merge([self, tbl])
1965 #tbl = tbl2
1966 Scantable.__init__(self, tbl)
1967 r._close()
1968 del r, tbl
1969 first = False
1970 if unit is not None:
1971 self.set_fluxunit(unit)
1972 #self.set_freqframe(rcParams['scantable.freqframe'])
1973
Note: See TracBrowser for help on using the repository browser.