source: branches/alma/python/scantable.py@ 1516

Last change on this file since 1516 was 1515, checked in by Kana Sugimoto, 16 years ago

New Development: No

JIRA Issue: Yes (CAS-1079)

Ready to Release: Yes

Interface Changes: Yes

What Interface Changed:
You can select stat='minpos' or 'maxpos' in scantable.stats() to
get the minimum or maximum value together with its channel/frequency/velocity.
A new method pos2data(self, rowno=0, pos=0) has also been added.

Test Programs:

Run scantable.stats() with stat='minpos' or 'maxpos'.

Put in Release Notes: No

Module(s): Module Names change impacts.

Description:

You can get min/max values with their position (channels/frequencies/velocities)
by selecting stat='minpos' or 'maxpos'.

The new method pos2data() returns abcissa and ordinate values of a spectrum
at an arbitrary row (rowno) and channel (pos) in the scantable.
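
For illustration, a minimal usage sketch (the input file name and channel number
below are placeholders, not part of this change):

  scan = scantable('mydata.asap')          # hypothetical ASAP table on disk
  scan.set_unit('km/s')                    # positions will be reported in km/s
  peaks = scan.stats(stat='maxpos')        # per-row maxima; positions are printed in verbose mode
  qx, y = scan.pos2data(rowno=0, pos=512)  # qx is a dict {'unit': ..., 'value': ...}, y is the value at that channel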


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 73.8 KB
1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log
4from asap import asaplog
5from asap import selector
6from asap import linecatalog
7from asap import _n_bools, mask_not, mask_and, mask_or
8
9class scantable(Scantable):
10 """
11 The ASAP container for scans
12 """
13
14 def __init__(self, filename, average=None, unit=None, getpt=None):
15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
25 scantable
26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
32 """
33 if average is None:
34 average = rcParams['scantable.autoaverage']
35 if getpt is None:
36 getpt = False
37 varlist = vars()
38 from asap._asap import stmath
39 self._math = stmath()
40 if isinstance(filename, Scantable):
41 Scantable.__init__(self, filename)
42 else:
43 if isinstance(filename, str):
44 import os.path
45 filename = os.path.expandvars(filename)
46 filename = os.path.expanduser(filename)
47 if not os.path.exists(filename):
48 s = "File '%s' not found." % (filename)
49 if rcParams['verbose']:
50 asaplog.push(s)
51 print asaplog.pop().strip()
52 return
53 raise IOError(s)
54 if os.path.isdir(filename) \
55 and not os.path.exists(filename+'/table.f1'):
56 # crude check if asap table
57 if os.path.exists(filename+'/table.info'):
58 ondisk = rcParams['scantable.storage'] == 'disk'
59 Scantable.__init__(self, filename, ondisk)
60 if unit is not None:
61 self.set_fluxunit(unit)
62 # do not reset to the default freqframe
63 #self.set_freqframe(rcParams['scantable.freqframe'])
64 else:
65 msg = "The given file '%s'is not a valid " \
66 "asap table." % (filename)
67 if rcParams['verbose']:
68 print msg
69 return
70 else:
71 raise IOError(msg)
72 else:
73 self._fill([filename], unit, average, getpt)
74 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
75 and isinstance(filename[-1], str):
76 self._fill(filename, unit, average, getpt)
77 self._add_history("scantable", varlist)
78 print_log()
79
80 def save(self, name=None, format=None, overwrite=False):
81 """
82 Store the scantable on disk. This can be an asap (aips++) Table,
83 SDFITS or MS2 format.
84 Parameters:
85 name: the name of the outputfile. For format "ASCII"
86 this is the root file name (data in 'name'.txt
87 and header in 'name'_header.txt)
88 format: an optional file format. Default is ASAP.
89 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
90 'SDFITS' (save as SDFITS file)
91 'ASCII' (saves as ascii text file)
92 'MS2' (saves as an aips++
93 MeasurementSet V2)
94 overwrite: If the file should be overwritten if it exists.
95 The default False is to return with warning
96 without writing the output. USE WITH CARE.
97 Example:
98 scan.save('myscan.asap')
99 scan.save('myscan.sdfits', 'SDFITS')
100 """
101 from os import path
102 if format is None: format = rcParams['scantable.save']
103 suffix = '.'+format.lower()
104 if name is None or name == "":
105 name = 'scantable'+suffix
106 msg = "No filename given. Using default name %s..." % name
107 asaplog.push(msg)
108 name = path.expandvars(name)
109 if path.isfile(name) or path.isdir(name):
110 if not overwrite:
111 msg = "File %s exists." % name
112 if rcParams['verbose']:
113 print msg
114 return
115 else:
116 raise IOError(msg)
117 format2 = format.upper()
118 if format2 == 'ASAP':
119 self._save(name)
120 else:
121 from asap._asap import stwriter as stw
122 writer = stw(format2)
123 writer.write(self, name)
124 print_log()
125 return
126
127 def copy(self):
128 """
129 Return a copy of this scantable.
130 Note:
131 This makes a full (deep) copy. scan2 = scan1 makes a reference.
132 Parameters:
133 none
134 Example:
135 copiedscan = scan.copy()
136 """
137 sd = scantable(Scantable._copy(self))
138 return sd
139
140 def drop_scan(self, scanid=None):
141 """
142 Return a new scantable where the specified scan number(s) has(have)
143 been dropped.
144 Parameters:
145 scanid: a (list of) scan number(s)
146 """
147 from asap import _is_sequence_or_number as _is_valid
148 from asap import _to_list
149 from asap import unique
150 if not _is_valid(scanid):
151 if rcParams['verbose']:
152 print "Please specify a scanno to drop from the scantable"
153 return
154 else:
155 raise RuntimeError("No scan given")
156 try:
157 scanid = _to_list(scanid)
158 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
159 for sid in scanid: allscans.remove(sid)
160 if len(allscans) == 0:
161 raise ValueError("Can't remove all scans")
162 except ValueError:
163 if rcParams['verbose']:
164 print "Couldn't find any match."
165 return
166 else: raise
167 try:
168 bsel = self.get_selection()
169 sel = selector()
170 sel.set_scans(allscans)
171 self.set_selection(bsel+sel)
172 scopy = self._copy()
173 self.set_selection(bsel)
174 return scantable(scopy)
175 except RuntimeError:
176 if rcParams['verbose']:
177 print "Couldn't find any match."
178 else:
179 raise
180
181
182 def get_scan(self, scanid=None):
183 """
184 Return a specific scan (by scanno) or collection of scans (by
185 source name) in a new scantable.
186 Note:
187 See scantable.drop_scan() for the inverse operation.
188 Parameters:
189 scanid: a (list of) scanno or a source name, unix-style
190 patterns are accepted for source name matching, e.g.
191 '*_R' gets all 'ref' scans
192 Example:
193 # get all scans containing the source '323p459'
194 newscan = scan.get_scan('323p459')
195 # get all 'off' scans
196 refscans = scan.get_scan('*_R')
197 # get a subset of scans by scanno (as listed in scan.summary())
198 newscan = scan.get_scan([0, 2, 7, 10])
199 """
200 if scanid is None:
201 if rcParams['verbose']:
202 print "Please specify a scan no or name to " \
203 "retrieve from the scantable"
204 return
205 else:
206 raise RuntimeError("No scan given")
207
208 try:
209 bsel = self.get_selection()
210 sel = selector()
211 if type(scanid) is str:
212 sel.set_name(scanid)
213 self.set_selection(bsel+sel)
214 scopy = self._copy()
215 self.set_selection(bsel)
216 return scantable(scopy)
217 elif type(scanid) is int:
218 sel.set_scans([scanid])
219 self.set_selection(bsel+sel)
220 scopy = self._copy()
221 self.set_selection(bsel)
222 return scantable(scopy)
223 elif type(scanid) is list:
224 sel.set_scans(scanid)
225 self.set_selection(sel)
226 scopy = self._copy()
227 self.set_selection(bsel)
228 return scantable(scopy)
229 else:
230 msg = "Illegal scanid type, use 'int' or 'list' if ints."
231 if rcParams['verbose']:
232 print msg
233 else:
234 raise TypeError(msg)
235 except RuntimeError:
236 if rcParams['verbose']: print "Couldn't find any match."
237 else: raise
238
239 def __str__(self):
240 return Scantable._summary(self, True)
241
242 def summary(self, filename=None):
243 """
244 Print a summary of the contents of this scantable.
245 Parameters:
246 filename: the name of a file to write the output to
247 Default - no file output
248 verbose: print extra info such as the frequency table
249 The default (False) is taken from .asaprc
250 """
251 info = Scantable._summary(self, True)
252 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
253 if filename is not None:
254 if filename == "":
255 filename = 'scantable_summary.txt'
256 from os.path import expandvars, isdir
257 filename = expandvars(filename)
258 if not isdir(filename):
259 data = open(filename, 'w')
260 data.write(info)
261 data.close()
262 else:
263 msg = "Illegal file name '%s'." % (filename)
264 if rcParams['verbose']:
265 print msg
266 else:
267 raise IOError(msg)
268 if rcParams['verbose']:
269 try:
270 from IPython.genutils import page as pager
271 except ImportError:
272 from pydoc import pager
273 pager(info)
274 else:
275 return info
276
277
278 def get_selection(self):
279 """
280 Get the selection object currently set on this scantable.
281 Parameters:
282 none
283 Example:
284 sel = scan.get_selection()
285 sel.set_ifs(0) # select IF 0
286 scan.set_selection(sel) # apply modified selection
287 """
288 return selector(self._getselection())
289
290 def set_selection(self, selection=selector()):
291 """
292 Select a subset of the data. All following operations on this scantable
293 are only applied to this selection.
294 Parameters:
295 selection: a selector object (default unset the selection)
296 Examples:
297 sel = selector() # create a selection object
298 sel.set_scans([0, 3]) # select SCANNO 0 and 3
299 scan.set_selection(sel) # set the selection
300 scan.summary() # will only print summary of scanno 0 and 3
301 scan.set_selection() # unset the selection
302 """
303 self._setselection(selection)
304
305 def get_row(self, row=0, insitu=None):
306 """
307 Select a row in the scantable.
308 Return a scantable with single row.
309 Parameters:
310 row: row no of integration, default is 0.
311 insitu: if False a new scantable is returned.
312 Otherwise, the scaling is done in-situ
313 The default is taken from .asaprc (False)
314 """
315 if insitu is None: insitu = rcParams['insitu']
316 if not insitu:
317 workscan = self.copy()
318 else:
319 workscan = self
320 # Select a row
321 sel=selector()
322 sel.set_scans([workscan.getscan(row)])
323 sel.set_cycles([workscan.getcycle(row)])
324 sel.set_beams([workscan.getbeam(row)])
325 sel.set_ifs([workscan.getif(row)])
326 sel.set_polarisations([workscan.getpol(row)])
327 sel.set_name(workscan._getsourcename(row))
328 workscan.set_selection(sel)
329 if not workscan.nrow() == 1:
330 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
331 raise RuntimeError(msg)
332 del sel
333 if insitu:
334 self._assign(workscan)
335 else:
336 return workscan
337
338 def stats(self, stat='stddev', mask=None):
339 """
340 Determine the specified statistic of the current beam/if/pol
341 Takes a 'mask' as an optional parameter to specify which
342 channels should be excluded.
343 You can get min/max values with their position
344 (channels/frequencies/velocities) by selecting stat='minpos'
345 or 'maxpos'.
346 Parameters:
347 stat: 'min', 'max', 'minpos', 'maxpos', 'sumsq', 'sum',
348 'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
349 mask: an optional mask specifying where the statistic
350 should be determined.
351 Example:
352 scan.set_unit('channel')
353 msk = scan.create_mask([100, 200], [500, 600])
354 scan.stats(stat='mean', mask=msk)
355 """
356 if mask is None:
357 mask = []
358 axes = ['Beam', 'IF', 'Pol', 'Time']
359 if not self._check_ifs():
360 raise ValueError("Cannot apply mask as the IFs have different "
361 "number of channels. Please use setselection() "
362 "to select individual IFs")
363
364 if stat.lower().find('pos') == -1:
365 statvals = self._math._stats(self, mask, stat)
366 position = False
367 sstat = str(stat)
368 else:
369 pos = self._math._minmaxpos(self, mask, stat)
370 position = True
371 statvals = []
372 sstat = stat.lower().strip('pos')
373 out = ''
374 axes = []
375 for i in range(self.nrow()):
376 axis = []
377 axis.append(self.getscan(i))
378 axis.append(self.getbeam(i))
379 axis.append(self.getif(i))
380 axis.append(self.getpol(i))
381 axis.append(self.getcycle(i))
382 axes.append(axis)
383 tm = self._gettime(i)
384 src = self._getsourcename(i)
385 xpos = ''
386 if position:
387 qx, y = self.pos2data(rowno=i, pos=pos[i])
388 statvals.append(y)
389 xpos = '(x = %3.3f' % (qx['value'])+' ['+qx['unit']+'])'
390 out += 'Scan[%d] (%s) ' % (axis[0], src)
391 out += 'Time[%s]:\n' % (tm)
392 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
393 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
394 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
395 out += '= %3.3f ' % (statvals[i]) +xpos+'\n'
396 out += "--------------------------------------------------\n"
397
398 if rcParams['verbose']:
399 print "--------------------------------------------------"
400 print " ", sstat
401 print "--------------------------------------------------"
402 print out
403 #else:
404 #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
405 # 'axes' : axes,
406 # 'data': statvals}
407 return statvals
408
409 def pos2data(self, rowno=0, pos=0):
410 """
411 Returns the abcissa and ordinate value of a spectrum
412 at an arbitrary row and channel in the scantable.
413 Parameters:
414 rowno: a row number in the scantable. Default is the
415 first row, i.e. rowno=0
416 pos: a channel in the scantable. Default is the first
417 channel, i.e. pos=0
418 """
419 if isinstance(rowno, int) and isinstance(pos, int):
420 x, xlbl = self.get_abcissa(rowno)
421 qx = {'unit': xlbl, 'value': x[pos]}
422 return qx, self._getspectrum(rowno)[pos]
423
424 def stddev(self, mask=None):
425 """
426 Determine the standard deviation of the current beam/if/pol
427 Takes a 'mask' as an optional parameter to specify which
428 channels should be excluded.
429 Parameters:
430 mask: an optional mask specifying where the standard
431 deviation should be determined.
432
433 Example:
434 scan.set_unit('channel')
435 msk = scan.create_mask([100, 200], [500, 600])
436 scan.stddev(mask=msk)
437 """
438 return self.stats(stat='stddev', mask=mask);
439
440
441 def get_column_names(self):
442 """
443 Return a list of column names, which can be used for selection.
444 """
445 return list(Scantable.get_column_names(self))
446
447 def get_tsys(self):
448 """
449 Return the System temperatures.
450 Returns:
451 a list of Tsys values for the current selection
452 """
453
454 return self._row_callback(self._gettsys, "Tsys")
455
456 def _row_callback(self, callback, label):
457 axes = []
458 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
459 out = ""
460 outvec = []
461 for i in range(self.nrow()):
462 axis = []
463 axis.append(self.getscan(i))
464 axis.append(self.getbeam(i))
465 axis.append(self.getif(i))
466 axis.append(self.getpol(i))
467 axis.append(self.getcycle(i))
468 axes.append(axis)
469 tm = self._gettime(i)
470 src = self._getsourcename(i)
471 out += 'Scan[%d] (%s) ' % (axis[0], src)
472 out += 'Time[%s]:\n' % (tm)
473 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
474 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
475 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
476 outvec.append(callback(i))
477 out += '= %3.3f\n' % (outvec[i])
478 out += "--------------------------------------------------\n"
479 if rcParams['verbose']:
480 print "--------------------------------------------------"
481 print " %s" % (label)
482 print "--------------------------------------------------"
483 print out
484 # disabled because the vector seems more useful
485 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
486 return outvec
487
488 def _get_column(self, callback, row=-1):
489 """
490 """
491 if row == -1:
492 return [callback(i) for i in range(self.nrow())]
493 else:
494 if 0 <= row < self.nrow():
495 return callback(row)
496
497
498 def get_time(self, row=-1, asdatetime=False):
499 """
500 Get a list of time stamps for the observations.
501 Return a datetime object for each integration time stamp in the scantable.
502 Parameters:
503 row: row no of integration. Default -1 return all rows
504 asdatetime: return values as datetime objects rather than strings
505 Example:
506 none
507 """
508 from time import strptime
509 from datetime import datetime
510 times = self._get_column(self._gettime, row)
511 if not asdatetime:
512 return times
513 format = "%Y/%m/%d/%H:%M:%S"
514 if isinstance(times, list):
515 return [datetime(*strptime(i, format)[:6]) for i in times]
516 else:
517 return datetime(*strptime(times, format)[:6])
518
519
520 def get_inttime(self, row=-1):
521 """
522 Get a list of integration times for the observations.
523 Return a time in seconds for each integration in the scantable.
524 Parameters:
525 row: row no of integration. Default -1 return all rows.
526 Example:
527 none
528 """
529 return self._get_column(self._getinttime, row)
530
531
532 def get_sourcename(self, row=-1):
533 """
534 Get a list source names for the observations.
535 Return a string for each integration in the scantable.
536 Parameters:
537 row: row no of integration. Default -1 return all rows.
538 Example:
539 none
540 """
541 return self._get_column(self._getsourcename, row)
542
543 def get_elevation(self, row=-1):
544 """
545 Get a list of elevations for the observations.
546 Return a float for each integration in the scantable.
547 Parameters:
548 row: row no of integration. Default -1 return all rows.
549 Example:
550 none
551 """
552 return self._get_column(self._getelevation, row)
553
554 def get_azimuth(self, row=-1):
555 """
556 Get a list of azimuths for the observations.
557 Return a float for each integration in the scantable.
558 Parameters:
559 row: row no of integration. Default -1 return all rows.
560 Example:
561 none
562 """
563 return self._get_column(self._getazimuth, row)
564
565 def get_parangle(self, row=-1):
566 """
567 Get a list of parallactic angles for the observations.
568 Return a float for each integration in the scantable.
569 Parameters:
570 row: row no of integration. Default -1 return all rows.
571 Example:
572 none
573 """
574 return self._get_column(self._getparangle, row)
575
576 def get_direction(self, row=-1):
577 """
578 Get a list of Positions on the sky (direction) for the observations.
579 Return a float for each integration in the scantable.
580 Parameters:
581 row: row no of integration. Default -1 return all rows
582 Example:
583 none
584 """
585 return self._get_column(self._getdirection, row)
586
587 def get_directionval(self, row=-1):
588 """
589 Get a list of Positions on the sky (direction) for the observations.
590 Return a float for each integration in the scantable.
591 Parameters:
592 row: row no of integration. Default -1 return all rows
593 Example:
594 none
595 """
596 return self._get_column(self._getdirectionvec, row)
597
598 def set_unit(self, unit='channel'):
599 """
600 Set the unit for all following operations on this scantable
601 Parameters:
602 unit: optional unit, default is 'channel'
603 one of '*Hz', 'km/s', 'channel', ''
604 """
605 varlist = vars()
606 if unit in ['', 'pixel', 'channel']:
607 unit = ''
608 inf = list(self._getcoordinfo())
609 inf[0] = unit
610 self._setcoordinfo(inf)
611 self._add_history("set_unit", varlist)
612
613 def set_instrument(self, instr):
614 """
615 Set the instrument for subsequent processing.
616 Parameters:
617 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
618 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
619 """
620 self._setInstrument(instr)
621 self._add_history("set_instument", vars())
622 print_log()
623
624 def set_feedtype(self, feedtype):
625 """
626 Overwrite the feed type, which might not be set correctly.
627 Parameters:
628 feedtype: 'linear' or 'circular'
629 """
630 self._setfeedtype(feedtype)
631 self._add_history("set_feedtype", vars())
632 print_log()
633
634 def set_doppler(self, doppler='RADIO'):
635 """
636 Set the doppler for all following operations on this scantable.
637 Parameters:
638 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
639 """
640 varlist = vars()
641 inf = list(self._getcoordinfo())
642 inf[2] = doppler
643 self._setcoordinfo(inf)
644 self._add_history("set_doppler", vars())
645 print_log()
646
647 def set_freqframe(self, frame=None):
648 """
649 Set the frame type of the Spectral Axis.
650 Parameters:
651 frame: an optional frame type, default 'LSRK'. Valid frames are:
652 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
653 'GEO', 'GALACTO', 'LGROUP', 'CMB'
654 Examples:
655 scan.set_freqframe('BARY')
656 """
657 if frame is None: frame = rcParams['scantable.freqframe']
658 varlist = vars()
659 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
660 'GEO', 'GALACTO', 'LGROUP', 'CMB']
661
662 if frame in valid:
663 inf = list(self._getcoordinfo())
664 inf[1] = frame
665 self._setcoordinfo(inf)
666 self._add_history("set_freqframe", varlist)
667 else:
668 msg = "Please specify a valid freq type. Valid types are:\n", valid
669 if rcParams['verbose']:
670 print msg
671 else:
672 raise TypeError(msg)
673 print_log()
674
675 def set_dirframe(self, frame=""):
676 """
677 Set the frame type of the Direction on the sky.
678 Parameters:
679 frame: an optional frame type, default ''. Valid frames are:
680 'J2000', 'B1950', 'GALACTIC'
681 Examples:
682 scan.set_dirframe('GALACTIC')
683 """
684 varlist = vars()
685 try:
686 Scantable.set_dirframe(self, frame)
687 except RuntimeError, msg:
688 if rcParams['verbose']:
689 print msg
690 else:
691 raise
692 self._add_history("set_dirframe", varlist)
693
694 def get_unit(self):
695 """
696 Get the default unit set in this scantable
697 Returns:
698 A unit string
699 """
700 inf = self._getcoordinfo()
701 unit = inf[0]
702 if unit == '': unit = 'channel'
703 return unit
704
705 def get_abcissa(self, rowno=0):
706 """
707 Get the abcissa in the current coordinate setup for the currently
708 selected Beam/IF/Pol
709 Parameters:
710 rowno: an optional row number in the scantable. Default is the
711 first row, i.e. rowno=0
712 Returns:
713 The abcissa values and the format string (as a dictionary)
714 """
715 abc = self._getabcissa(rowno)
716 lbl = self._getabcissalabel(rowno)
717 print_log()
718 return abc, lbl
719
720 def flag(self, mask=None, unflag=False):
721 """
722 Flag the selected data using an optional channel mask.
723 Parameters:
724 mask: an optional channel mask, created with create_mask. Default
725 (no mask) is all channels.
726 unflag: if True, unflag the data
727 """
728 varlist = vars()
729 if mask is None:
730 mask = []
731 try:
732 self._flag(mask, unflag)
733 except RuntimeError, msg:
734 if rcParams['verbose']:
735 print msg
736 return
737 else: raise
738 self._add_history("flag", varlist)
739
740 def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
741 """
742 Flag the data in 'lag' space by providing a frequency to remove.
743 Flagged data in the scantable gets set to 0.0 before the fft.
744 No taper is applied.
745 Parameters:
746 frequency: the frequency (really a period within the bandwidth)
747 to remove
748 width: the width of the frequency to remove, to remove a
749 range of frequencies around the centre.
750 unit: the frequency unit (default "GHz")
751 Notes:
752 It is recommended to flag edges of the band or strong
753 signals beforehand.
754 """
755 if insitu is None: insitu = rcParams['insitu']
756 self._math._setinsitu(insitu)
757 varlist = vars()
758 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
759 if not base.has_key(unit):
760 raise ValueError("%s is not a valid unit." % unit)
761 try:
762 s = scantable(self._math._lag_flag(self, frequency*base[unit],
763 width*base[unit]))
764 except RuntimeError, msg:
765 if rcParams['verbose']:
766 print msg
767 return
768 else: raise
769 s._add_history("lag_flag", varlist)
770 print_log()
771 if insitu:
772 self._assign(s)
773 else:
774 return s
775
776
777 def create_mask(self, *args, **kwargs):
778 """
779 Compute and return a mask based on [min, max] windows.
780 The specified windows are to be INCLUDED, when the mask is
781 applied.
782 Parameters:
783 [min, max], [min2, max2], ...
784 Pairs of start/end points (inclusive) specifying the regions
785 to be masked
786 invert: optional argument. If specified as True,
787 return an inverted mask, i.e. the regions
788 specified are EXCLUDED
789 row: create the mask using the specified row for
790 unit conversions, default is row=0
791 only necessary if frequency varies over rows.
792 Example:
793 scan.set_unit('channel')
794 a)
795 msk = scan.create_mask([400, 500], [800, 900])
796 # masks everything outside 400 and 500
797 # and 800 and 900 in the unit 'channel'
798
799 b)
800 msk = scan.create_mask([400, 500], [800, 900], invert=True)
801 # masks the regions between 400 and 500
802 # and 800 and 900 in the unit 'channel'
803 c)
804 mask only channel 400
805 msk = scan.create_mask([400, 400])
806 """
807 row = 0
808 if kwargs.has_key("row"):
809 row = kwargs.get("row")
810 data = self._getabcissa(row)
811 u = self._getcoordinfo()[0]
812 if rcParams['verbose']:
813 if u == "": u = "channel"
814 msg = "The current mask window unit is %s" % u
815 i = self._check_ifs()
816 if not i:
817 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
818 asaplog.push(msg)
819 n = self.nchan()
820 msk = _n_bools(n, False)
822 # test if args is a 'list' or a 'normal' *args - UGLY!!!
822
823 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
824 and args or args[0]
825 for window in ws:
826 if (len(window) != 2 or window[0] > window[1] ):
827 raise TypeError("A window needs to be defined as [min, max]")
828 for i in range(n):
829 if data[i] >= window[0] and data[i] <= window[1]:
830 msk[i] = True
831 if kwargs.has_key('invert'):
832 if kwargs.get('invert'):
833 msk = mask_not(msk)
834 print_log()
835 return msk
836
837 def get_masklist(self, mask=None, row=0):
838 """
839 Compute and return a list of mask windows, [min, max].
840 Parameters:
841 mask: channel mask, created with create_mask.
842 row: calculate the masklist using the specified row
843 for unit conversions, default is row=0
844 only necessary if frequency varies over rows.
845 Returns:
846 [min, max], [min2, max2], ...
847 Pairs of start/end points (inclusive) specifying
848 the masked regions
849 """
850 if not (isinstance(mask,list) or isinstance(mask, tuple)):
851 raise TypeError("The mask should be list or tuple.")
852 if len(mask) < 2:
853 raise TypeError("The mask elements should be > 1")
854 if self.nchan() != len(mask):
855 msg = "Number of channels in scantable != number of mask elements"
856 raise TypeError(msg)
857 data = self._getabcissa(row)
858 u = self._getcoordinfo()[0]
859 if rcParams['verbose']:
860 if u == "": u = "channel"
861 msg = "The current mask window unit is %s" % u
862 i = self._check_ifs()
863 if not i:
864 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
865 asaplog.push(msg)
866 masklist=[]
867 ist, ien = None, None
868 ist, ien=self.get_mask_indices(mask)
869 if ist is not None and ien is not None:
870 for i in xrange(len(ist)):
871 range=[data[ist[i]],data[ien[i]]]
872 range.sort()
873 masklist.append([range[0],range[1]])
874 return masklist
875
876 def get_mask_indices(self, mask=None):
877 """
878 Compute and Return lists of mask start indices and mask end indices.
879 Parameters:
880 mask: channel mask, created with create_mask.
881 Returns:
882 List of mask start indices and that of mask end indices,
883 i.e., [istart1,istart2,....], [iend1,iend2,....].
884 """
885 if not (isinstance(mask,list) or isinstance(mask, tuple)):
886 raise TypeError("The mask should be list or tuple.")
887 if len(mask) < 2:
888 raise TypeError("The mask elements should be > 1")
889 istart=[]
890 iend=[]
891 if mask[0]: istart.append(0)
892 for i in range(len(mask)-1):
893 if not mask[i] and mask[i+1]:
894 istart.append(i+1)
895 elif mask[i] and not mask[i+1]:
896 iend.append(i)
897 if mask[len(mask)-1]: iend.append(len(mask)-1)
898 if len(istart) != len(iend):
899 raise RuntimeError("Numbers of mask start != mask end.")
900 for i in range(len(istart)):
901 if istart[i] > iend[i]:
902 raise RuntimeError("Mask start index > mask end index")
903 break
904 return istart,iend
905
906# def get_restfreqs(self):
907# """
908# Get the restfrequency(s) stored in this scantable.
909# The return value(s) are always of unit 'Hz'
910# Parameters:
911# none
912# Returns:
913# a list of doubles
914# """
915# return list(self._getrestfreqs())
916
917 def get_restfreqs(self, ids=None):
918 """
919 Get the restfrequency(s) stored in this scantable.
920 The return value(s) are always of unit 'Hz'
921 Parameters:
922 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
923 be retrieved
924 Returns:
925 dictionary containing ids and a list of doubles for each id
926 """
927 if ids is None:
928 rfreqs={}
929 idlist = self.getmolnos()
930 for i in idlist:
931 rfreqs[i]=list(self._getrestfreqs(i))
932 return rfreqs
933 else:
934 if type(ids)==list or type(ids)==tuple:
935 rfreqs={}
936 for i in ids:
937 rfreqs[i]=list(self._getrestfreqs(i))
938 return rfreqs
939 else:
940 return list(self._getrestfreqs(ids))
941 #return list(self._getrestfreqs(ids))
942
943 def set_restfreqs(self, freqs=None, unit='Hz'):
944 """
945 ********NEED TO BE UPDATED begin************
946 Set or replace the specified restfrequency.
947 If the 'freqs' argument holds a scalar,
948 then that rest frequency will be applied to all the selected
949 data. If the 'freqs' argument holds
950 a vector, then it MUST be of equal or smaller length than
951 the number of IFs (and the available restfrequencies will be
952 replaced by this vector). In this case, *all* data have
953 the restfrequency set per IF according
954 to the corresponding value you give in the 'freqs' vector.
955 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
956 IF 1 gets restfreq 2e9.
957 ********NEED TO BE UPDATED end************
958 You can also specify the frequencies via a linecatalog.
959
960 Parameters:
961 freqs: list of rest frequency values or string identifiers
962 unit: unit for rest frequency (default 'Hz')
963
964 Example:
965 # set the given restfrequency for the all currently selected IFs
966 scan.set_restfreqs(freqs=1.4e9)
967 # set multiple restfrequencies to all the selected data
968 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
969 # If the number of IFs in the data is >= 2 the IF0 gets the first
970 # value IF1 the second... NOTE that freqs needs to be
971 # specified in list of list (e.g. [[],[],...] ).
972 scan.set_restfreqs(freqs=[[1.4e9],[1.67e9]])
973 #set the given restfrequency for the whole table (by name)
974 scan.set_restfreqs(freqs="OH1667")
975
976 Note:
977 To do more sophisticated restfrequency setting, e.g. on a
978 source and IF basis, use scantable.set_selection() before using
979 this function.
980 # provided your scantable is called scan
981 selection = selector()
982 selection.set_name("ORION*")
983 selection.set_ifs([1])
984 scan.set_selection(selection)
985 scan.set_restfreqs(freqs=86.6e9)
986
987 """
988 varlist = vars()
989 from asap import linecatalog
990 # simple value
991 if isinstance(freqs, int) or isinstance(freqs, float):
992 # TT mod
993 #self._setrestfreqs(freqs, "",unit)
994 self._setrestfreqs([freqs], [""],unit)
995 # list of values
996 elif isinstance(freqs, list) or isinstance(freqs, tuple):
997 # list values are scalars
998 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
999 self._setrestfreqs(freqs, [""],unit)
1000 # list values are tuples, (value, name)
1001 elif isinstance(freqs[-1], dict):
1002 #sel = selector()
1003 #savesel = self._getselection()
1004 #iflist = self.getifnos()
1005 #for i in xrange(len(freqs)):
1006 # sel.set_ifs(iflist[i])
1007 # self._setselection(sel)
1008 # self._setrestfreqs(freqs[i], "",unit)
1009 #self._setselection(savesel)
1010 self._setrestfreqs(freqs["value"],
1011 freqs["name"], "MHz")
1012 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1013 sel = selector()
1014 savesel = self._getselection()
1015 iflist = self.getifnos()
1016 if len(freqs)>len(iflist):
1017 raise ValueError("number of elements in list of list exeeds the current IF selections")
1018 for i in xrange(len(freqs)):
1019 sel.set_ifs(iflist[i])
1020 self._setselection(sel)
1021 self._setrestfreqs(freqs[i]["value"],
1022 freqs[i]["name"], "MHz")
1023 self._setselection(savesel)
1024 # freqs are to be taken from a linecatalog
1025 elif isinstance(freqs, linecatalog):
1026 sel = selector()
1027 savesel = self._getselection()
1028 for i in xrange(freqs.nrow()):
1029 sel.set_ifs(iflist[i])
1030 self._setselection(sel)
1031 self._setrestfreqs(freqs.get_frequency(i),
1032 freqs.get_name(i), "MHz")
1033 # ensure that we are not iterating past nIF
1034 if i == self.nif()-1: break
1035 self._setselection(savesel)
1036 else:
1037 return
1038 self._add_history("set_restfreqs", varlist)
1039
1040 def shift_refpix(self, delta):
1041 """
1042 Shift the reference pixel of the Spectra Coordinate by an
1043 integer amount.
1044 Parameters:
1045 delta: the amount to shift by
1046 Note:
1047 Be careful using this with broadband data.
1048 """
1049 Scantable.shift(self, delta)
1050
1051 def history(self, filename=None):
1052 """
1053 Print the history. Optionally to a file.
1054 Parameters:
1055 filename: The name of the file to save the history to.
1056 """
1057 hist = list(self._gethistory())
1058 out = "-"*80
1059 for h in hist:
1060 if h.startswith("---"):
1061 out += "\n"+h
1062 else:
1063 items = h.split("##")
1064 date = items[0]
1065 func = items[1]
1066 items = items[2:]
1067 out += "\n"+date+"\n"
1068 out += "Function: %s\n Parameters:" % (func)
1069 for i in items:
1070 s = i.split("=")
1071 out += "\n %s = %s" % (s[0], s[1])
1072 out += "\n"+"-"*80
1073 if filename is not None:
1074 if filename == "":
1075 filename = 'scantable_history.txt'
1076 import os
1077 filename = os.path.expandvars(os.path.expanduser(filename))
1078 if not os.path.isdir(filename):
1079 data = open(filename, 'w')
1080 data.write(out)
1081 data.close()
1082 else:
1083 msg = "Illegal file name '%s'." % (filename)
1084 if rcParams['verbose']:
1085 print msg
1086 else:
1087 raise IOError(msg)
1088 if rcParams['verbose']:
1089 try:
1090 from IPython.genutils import page as pager
1091 except ImportError:
1092 from pydoc import pager
1093 pager(out)
1094 else:
1095 return out
1096 return
1097 #
1098 # Maths business
1099 #
1100
1101 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1102 """
1103 Return the (time) weighted average of a scan.
1104 Note:
1105 in channels only - align if necessary
1106 Parameters:
1107 mask: an optional mask (only used for 'var' and 'tsys'
1108 weighting)
1109 scanav: True averages each scan separately
1110 False (default) averages all scans together,
1111 weight: Weighting scheme.
1112 'none' (mean no weight)
1113 'var' (1/var(spec) weighted)
1114 'tsys' (1/Tsys**2 weighted)
1115 'tint' (integration time weighted)
1116 'tintsys' (Tint/Tsys**2)
1117 'median' ( median averaging)
1118 The default is 'tint'
1119 align: align the spectra in velocity before averaging. It takes
1120 the time of the first spectrum as reference time.
1121 Example:
1122 # time average the scantable without using a mask
1123 newscan = scan.average_time()
1124 """
1125 varlist = vars()
1126 if weight is None: weight = 'TINT'
1127 if mask is None: mask = ()
1128 if scanav: scanav = "SCAN"
1129 else: scanav = "NONE"
1130 scan = (self, )
1131 try:
1132 if align:
1133 scan = (self.freq_align(insitu=False), )
1134 s = None
1135 if weight.upper() == 'MEDIAN':
1136 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1137 scanav))
1138 else:
1139 s = scantable(self._math._average(scan, mask, weight.upper(),
1140 scanav))
1141 except RuntimeError, msg:
1142 if rcParams['verbose']:
1143 print msg
1144 return
1145 else: raise
1146 s._add_history("average_time", varlist)
1147 print_log()
1148 return s
1149
1150 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1151 """
1152 Return a scan where all spectra are converted to either
1153 Jansky or Kelvin depending upon the flux units of the scan table.
1154 By default the function tries to look the values up internally.
1155 If it can't find them (or if you want to over-ride), you must
1156 specify EITHER jyperk OR eta (and D which it will try to look up
1157 also if you don't set it). jyperk takes precedence if you set both.
1158 Parameters:
1159 jyperk: the Jy / K conversion factor
1160 eta: the aperture efficiency
1161 d: the geometric diameter (metres)
1162 insitu: if False a new scantable is returned.
1163 Otherwise, the scaling is done in-situ
1164 The default is taken from .asaprc (False)
1165 """
1166 if insitu is None: insitu = rcParams['insitu']
1167 self._math._setinsitu(insitu)
1168 varlist = vars()
1169 if jyperk is None: jyperk = -1.0
1170 if d is None: d = -1.0
1171 if eta is None: eta = -1.0
1172 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1173 s._add_history("convert_flux", varlist)
1174 print_log()
1175 if insitu: self._assign(s)
1176 else: return s
1177
1178 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1179 """
1180 Return a scan after applying a gain-elevation correction.
1181 The correction can be made via either a polynomial or a
1182 table-based interpolation (and extrapolation if necessary).
1183 You specify polynomial coefficients, an ascii table or neither.
1184 If you specify neither, then a polynomial correction will be made
1185 with built in coefficients known for certain telescopes (an error
1186 will occur if the instrument is not known).
1187 The data and Tsys are *divided* by the scaling factors.
1188 Parameters:
1189 poly: Polynomial coefficients (default None) to compute a
1190 gain-elevation correction as a function of
1191 elevation (in degrees).
1192 filename: The name of an ascii file holding correction factors.
1193 The first row of the ascii file must give the column
1194 names and these MUST include columns
1195 "ELEVATION" (degrees) and "FACTOR" (multiply data
1196 by this) somewhere.
1197 The second row must give the data type of the
1198 column. Use 'R' for Real and 'I' for Integer.
1199 An example file would be
1200 (actual factors are arbitrary) :
1201
1202 TIME ELEVATION FACTOR
1203 R R R
1204 0.1 0 0.8
1205 0.2 20 0.85
1206 0.3 40 0.9
1207 0.4 60 0.85
1208 0.5 80 0.8
1209 0.6 90 0.75
1210 method: Interpolation method when correcting from a table.
1211 Values are "nearest", "linear" (default), "cubic"
1212 and "spline"
1213 insitu: if False a new scantable is returned.
1214 Otherwise, the scaling is done in-situ
1215 The default is taken from .asaprc (False)
1216 """
1217
1218 if insitu is None: insitu = rcParams['insitu']
1219 self._math._setinsitu(insitu)
1220 varlist = vars()
1221 if poly is None:
1222 poly = ()
1223 from os.path import expandvars
1224 filename = expandvars(filename)
1225 s = scantable(self._math._gainel(self, poly, filename, method))
1226 s._add_history("gain_el", varlist)
1227 print_log()
1228 if insitu: self._assign(s)
1229 else: return s
1230
1231 def freq_align(self, reftime=None, method='cubic', insitu=None):
1232 """
1233 Return a scan where all rows have been aligned in frequency/velocity.
1234 The alignment frequency frame (e.g. LSRK) is that set by function
1235 set_freqframe.
1236 Parameters:
1237 reftime: reference time to align at. By default, the time of
1238 the first row of data is used.
1239 method: Interpolation method for regridding the spectra.
1240 Choose from "nearest", "linear", "cubic" (default)
1241 and "spline"
1242 insitu: if False a new scantable is returned.
1243 Otherwise, the scaling is done in-situ
1244 The default is taken from .asaprc (False)
1245 """
1246 if insitu is None: insitu = rcParams["insitu"]
1247 self._math._setinsitu(insitu)
1248 varlist = vars()
1249 if reftime is None: reftime = ""
1250 s = scantable(self._math._freq_align(self, reftime, method))
1251 s._add_history("freq_align", varlist)
1252 print_log()
1253 if insitu: self._assign(s)
1254 else: return s
1255
1256 def opacity(self, tau, insitu=None):
1257 """
1258 Apply an opacity correction. The data
1259 and Tsys are multiplied by the correction factor.
1260 Parameters:
1261 tau: Opacity from which the correction factor is
1262 exp(tau*ZD)
1263 where ZD is the zenith-distance
1264 insitu: if False a new scantable is returned.
1265 Otherwise, the scaling is done in-situ
1266 The default is taken from .asaprc (False)
1267 """
1268 if insitu is None: insitu = rcParams['insitu']
1269 self._math._setinsitu(insitu)
1270 varlist = vars()
1271 s = scantable(self._math._opacity(self, tau))
1272 s._add_history("opacity", varlist)
1273 print_log()
1274 if insitu: self._assign(s)
1275 else: return s
1276
1277 def bin(self, width=5, insitu=None):
1278 """
1279 Return a scan where all spectra have been binned up.
1280 Parameters:
1281 width: The bin width (default=5) in pixels
1282 insitu: if False a new scantable is returned.
1283 Otherwise, the scaling is done in-situ
1284 The default is taken from .asaprc (False)
1285 """
1286 if insitu is None: insitu = rcParams['insitu']
1287 self._math._setinsitu(insitu)
1288 varlist = vars()
1289 s = scantable(self._math._bin(self, width))
1290 s._add_history("bin", varlist)
1291 print_log()
1292 if insitu: self._assign(s)
1293 else: return s
1294
1295
1296 def resample(self, width=5, method='cubic', insitu=None):
1297 """
1298 Return a scan where all spectra have been binned up.
1299
1300 Parameters:
1301 width: The bin width (default=5) in pixels
1302 method: Interpolation method when correcting from a table.
1303 Values are "nearest", "linear", "cubic" (default)
1304 and "spline"
1305 insitu: if False a new scantable is returned.
1306 Otherwise, the scaling is done in-situ
1307 The default is taken from .asaprc (False)
1308 """
1309 if insitu is None: insitu = rcParams['insitu']
1310 self._math._setinsitu(insitu)
1311 varlist = vars()
1312 s = scantable(self._math._resample(self, method, width))
1313 s._add_history("resample", varlist)
1314 print_log()
1315 if insitu: self._assign(s)
1316 else: return s
1317
1318
1319 def average_pol(self, mask=None, weight='none'):
1320 """
1321 Average the Polarisations together.
1322 Parameters:
1323 mask: An optional mask defining the region, where the
1324 averaging will be applied. The output will have all
1325 specified points masked.
1326 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1327 weighted), or 'tsys' (1/Tsys**2 weighted)
1328 """
1329 varlist = vars()
1330 if mask is None:
1331 mask = ()
1332 s = scantable(self._math._averagepol(self, mask, weight.upper()))
1333 s._add_history("average_pol", varlist)
1334 print_log()
1335 return s
1336
1337 def average_beam(self, mask=None, weight='none'):
1338 """
1339 Average the Beams together.
1340 Parameters:
1341 mask: An optional mask defining the region, where the
1342 averaging will be applied. The output will have all
1343 specified points masked.
1344 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1345 weighted), or 'tsys' (1/Tsys**2 weighted)
1346 """
1347 varlist = vars()
1348 if mask is None:
1349 mask = ()
1350 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1351 s._add_history("average_beam", varlist)
1352 print_log()
1353 return s
1354
1355 def convert_pol(self, poltype=None):
1356 """
1357 Convert the data to a different polarisation type.
1358 Parameters:
1359 poltype: The new polarisation type. Valid types are:
1360 "linear", "stokes" and "circular"
1361 """
1362 varlist = vars()
1363 try:
1364 s = scantable(self._math._convertpol(self, poltype))
1365 except RuntimeError, msg:
1366 if rcParams['verbose']:
1367 print msg
1368 return
1369 else:
1370 raise
1371 s._add_history("convert_pol", varlist)
1372 print_log()
1373 return s
1374
1375 def smooth(self, kernel="hanning", width=5.0, insitu=None):
1376 """
1377 Smooth the spectrum by the specified kernel (conserving flux).
1378 Parameters:
1379 kernel: The type of smoothing kernel. Select from
1380 'hanning' (default), 'gaussian', 'boxcar' and
1381 'rmedian'
1382 width: The width of the kernel in pixels. For hanning this is
1383 ignored, otherwise it defaults to 5 pixels.
1384 For 'gaussian' it is the Full Width Half
1385 Maximum. For 'boxcar' it is the full width.
1386 For 'rmedian' it is the half width.
1387 insitu: if False a new scantable is returned.
1388 Otherwise, the scaling is done in-situ
1389 The default is taken from .asaprc (False)
1390 Example:
1391 none
1392 """
1393 if insitu is None: insitu = rcParams['insitu']
1394 self._math._setinsitu(insitu)
1395 varlist = vars()
1396 s = scantable(self._math._smooth(self, kernel.lower(), width))
1397 s._add_history("smooth", varlist)
1398 print_log()
1399 if insitu: self._assign(s)
1400 else: return s
1401
1402
1403 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
1404 """
1405 Return a scan which has been baselined (all rows) by a polynomial.
1406 Parameters:
1407 mask: an optional mask
1408 order: the order of the polynomial (default is 0)
1409 plot: plot the fit and the residual. In this each
1410 individual fit has to be approved by typing 'y'
1411 or 'n'
1412 uselin: use linear polynomial fit
1413 insitu: if False a new scantable is returned.
1414 Otherwise, the scaling is done in-situ
1415 The default is taken from .asaprc (False)
1416 Example:
1417 # return a scan baselined by a third order polynomial,
1418 # not using a mask
1419 bscan = scan.poly_baseline(order=3)
1420 """
1421 if insitu is None: insitu = rcParams['insitu']
1422 varlist = vars()
1423 if mask is None:
1424 mask = [True for i in xrange(self.nchan(-1))]
1425 from asap.asapfitter import fitter
1426 try:
1427 f = fitter()
1428 f.set_scan(self, mask)
1429 if uselin:
1430 f.set_function(lpoly=order)
1431 else:
1432 f.set_function(poly=order)
1433 s = f.auto_fit(insitu, plot=plot)
1434 # Save parameters of baseline fits as a class attribute.
1435 # NOTICE: It does not reflect changes in scantable!
1436 self.blpars = f.blpars
1437 s._add_history("poly_baseline", varlist)
1438 print_log()
1439 if insitu: self._assign(s)
1440 else: return s
1441 except RuntimeError:
1442 msg = "The fit failed, possibly because it didn't converge."
1443 if rcParams['verbose']:
1444 print msg
1445 return
1446 else:
1447 raise RuntimeError(msg)
1448
1449
1450 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1451 threshold=3, chan_avg_limit=1, plot=False,
1452 insitu=None):
1453 """
1454 Return a scan which has been baselined (all rows) by a polynomial.
1455 Spectral lines are detected first using linefinder and masked out
1456 to avoid them affecting the baseline solution.
1457
1458 Parameters:
1459 mask: an optional mask retrieved from scantable
1460 edge: an optional number of channels to drop at
1461 the edge of the spectrum. If only one value is
1462 specified, the same number will be dropped from
1463 both sides of the spectrum. Default is to keep
1464 all channels. Nested tuples represent individual
1465 edge selections for different IFs (the number of
1466 spectral channels can differ between IFs)
1467 order: the order of the polynomial (default is 0)
1468 threshold: the threshold used by line finder. It is better to
1469 keep it large as only strong lines affect the
1470 baseline solution.
1471 chan_avg_limit:
1472 a maximum number of consecutive spectral channels to
1473 average during the search of weak and broad lines.
1474 The default is no averaging (and no search for weak
1475 lines). If such lines can affect the fitted baseline
1476 (e.g. a high order polynomial is fitted), increase this
1477 parameter (usually values up to 8 are reasonable). Most
1478 users of this method should find the default value
1479 sufficient.
1480 plot: plot the fit and the residual. In this each
1481 individual fit has to be approved by typing 'y'
1482 or 'n'
1483 insitu: if False a new scantable is returned.
1484 Otherwise, the scaling is done in-situ
1485 The default is taken from .asaprc (False)
1486
1487 Example:
1488 scan2=scan.auto_poly_baseline(order=7)
1489 """
1490 if insitu is None: insitu = rcParams['insitu']
1491 varlist = vars()
1492 from asap.asapfitter import fitter
1493 from asap.asaplinefind import linefinder
1494 from asap import _is_sequence_or_number as _is_valid
1495
1496 # check whether edge is set up for each IF individually
1497 individualedge = False;
1498 if len(edge) > 1:
1499 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1500 individualedge = True;
1501
1502 if not _is_valid(edge, int) and not individualedge:
1503 raise ValueError, "Parameter 'edge' has to be an integer or a \
1504 pair of integers specified as a tuple. Nested tuples are allowed \
1505 to make individual selection for different IFs."
1506
1507 curedge = (0, 0)
1508 if individualedge:
1509 for edgepar in edge:
1510 if not _is_valid(edgepar, int):
1511 raise ValueError, "Each element of the 'edge' tuple has \
1512 to be a pair of integers or an integer."
1513 else:
1514 curedge = edge;
1515
1516 # setup fitter
1517 f = fitter()
1518 f.set_function(poly=order)
1519
1520 # setup line finder
1521 fl = linefinder()
1522 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
1523
1524 if not insitu:
1525 workscan = self.copy()
1526 else:
1527 workscan = self
1528
1529 fl.set_scan(workscan)
1530
1531 rows = range(workscan.nrow())
1532 # Save parameters of baseline fits & masklists as a class attribute.
1533 # NOTICE: It does not reflect changes in scantable!
1534 if len(rows) > 0:
1535 self.blpars=[]
1536 self.masklists=[]
1537 asaplog.push("Processing:")
1538 for r in rows:
1539 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1540 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1541 workscan.getpol(r), workscan.getcycle(r))
1542 asaplog.push(msg, False)
1543
1544 # figure out edge parameter
1545 if individualedge:
1546 if len(edge) <= workscan.getif(r):
1547 raise RuntimeError, "Number of edge elements appear to " \
1548 "be less than the number of IFs"
1549 curedge = edge[workscan.getif(r)]
1550
1551 # setup line finder
1552 fl.find_lines(r, mask, curedge)
1553 outmask=fl.get_mask()
1554 f.set_scan(workscan, fl.get_mask())
1555 f.x = workscan._getabcissa(r)
1556 f.y = workscan._getspectrum(r)
1557 f.data = None
1558 f.fit()
1559
1560 # Show mask list
1561 masklist=workscan.get_masklist(fl.get_mask(),row=r)
1562 msg = "mask range: "+str(masklist)
1563 asaplog.push(msg, False)
1564
1565 fpar = f.get_parameters()
1566 if plot:
1567 f.plot(residual=True)
1568 x = raw_input("Accept fit ( [y]/n ): ")
1569 if x.upper() == 'N':
1570 self.blpars.append(None)
1571 self.masklists.append(None)
1572 continue
1573 workscan._setspectrum(f.fitter.getresidual(), r)
1574 self.blpars.append(fpar)
1575 self.masklists.append(masklist)
1576 if plot:
1577 f._p.unmap()
1578 f._p = None
1579 workscan._add_history("auto_poly_baseline", varlist)
1580 if insitu:
1581 self._assign(workscan)
1582 else:
1583 return workscan
1584
1585 def rotate_linpolphase(self, angle):
1586 """
1587 Rotate the phase of the complex polarization O=Q+iU correlation.
1588 This is always done in situ in the raw data. So if you call this
1589 function more than once then each call rotates the phase further.
1590 Parameters:
1591 angle: The angle (degrees) to rotate (add) by.
1592 Examples:
1593 scan.rotate_linpolphase(2.3)
1594 """
1595 varlist = vars()
1596 self._math._rotate_linpolphase(self, angle)
1597 self._add_history("rotate_linpolphase", varlist)
1598 print_log()
1599 return
1600
1601
1602 def rotate_xyphase(self, angle):
1603 """
1604 Rotate the phase of the XY correlation. This is always done in situ
1605 in the data. So if you call this function more than once
1606 then each call rotates the phase further.
1607 Parameters:
1608 angle: The angle (degrees) to rotate (add) by.
1609 Examples:
1610 scan.rotate_xyphase(2.3)
1611 """
1612 varlist = vars()
1613 self._math._rotate_xyphase(self, angle)
1614 self._add_history("rotate_xyphase", varlist)
1615 print_log()
1616 return
1617
1618 def swap_linears(self):
1619 """
1620 Swap the linear polarisations XX and YY, or better the first two
1621 polarisations as this also works for circulars.
1622 """
1623 varlist = vars()
1624 self._math._swap_linears(self)
1625 self._add_history("swap_linears", varlist)
1626 print_log()
1627 return
1628
1629 def invert_phase(self):
1630 """
1631 Invert the phase of the complex polarisation
1632 """
1633 varlist = vars()
1634 self._math._invert_phase(self)
1635 self._add_history("invert_phase", varlist)
1636 print_log()
1637 return
1638
1639 def add(self, offset, insitu=None):
1640 """
1641 Return a scan where all spectra have the offset added
1642 Parameters:
1643 offset: the offset
1644 insitu: if False a new scantable is returned.
1645 Otherwise, the scaling is done in-situ
1646 The default is taken from .asaprc (False)
1647 """
1648 if insitu is None: insitu = rcParams['insitu']
1649 self._math._setinsitu(insitu)
1650 varlist = vars()
1651 s = scantable(self._math._unaryop(self, offset, "ADD", False))
1652 s._add_history("add", varlist)
1653 print_log()
1654 if insitu:
1655 self._assign(s)
1656 else:
1657 return s
1658
1659 def scale(self, factor, tsys=True, insitu=None):
1660 """
1661 Return a scan where all spectra are scaled by the given 'factor'
1662 Parameters:
1663 factor: the scaling factor
1664 insitu: if False a new scantable is returned.
1665 Otherwise, the scaling is done in-situ
1666 The default is taken from .asaprc (False)
1667 tsys: if True (default) then apply the operation to Tsys
1668 as well as the data
1669 """
1670 if insitu is None: insitu = rcParams['insitu']
1671 self._math._setinsitu(insitu)
1672 varlist = vars()
1673 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1674 s._add_history("scale", varlist)
1675 print_log()
1676 if insitu:
1677 self._assign(s)
1678 else:
1679 return s
1680
1681 def auto_quotient(self, preserve=True, mode='paired'):
1682 """
1683 This function allows you to build quotients automatically.
1684 It assumes the observation has the same number of
1685 "ons" and "offs"
1686 Parameters:
1687 preserve: you can preserve (default) the continuum or
1688 remove it. The equations used are
1689 preserve: Output = Toff * (on/off) - Toff
1690 remove: Output = Toff * (on/off) - Ton
1691 mode: the on/off detection mode
1692 'paired' (default)
1693 identifies 'off' scans by the
1694 trailing '_R' (Mopra/Parkes) or
1695 '_e'/'_w' (Tid) and matches
1696 on/off pairs from the observing pattern
1697 'time'
1698 finds the closest off in time
1699
1700 """
1701 modes = ["time", "paired"]
1702 if not mode in modes:
1703 msg = "please provide valid mode. Valid modes are %s" % (modes)
1704 raise ValueError(msg)
1705 varlist = vars()
1706 s = None
1707 if mode.lower() == "paired":
1708 basesel = self.get_selection()
1709 sel = selector()+basesel
1710 sel.set_query("SRCTYPE==1")
1711 self.set_selection(sel)
1712 offs = self.copy()
1713 sel.set_query("SRCTYPE==0")
1714 self.set_selection(sel)
1715 ons = self.copy()
1716 s = scantable(self._math._quotient(ons, offs, preserve))
1717 self.set_selection(basesel)
1718 elif mode.lower() == "time":
1719 s = scantable(self._math._auto_quotient(self, mode, preserve))
1720 s._add_history("auto_quotient", varlist)
1721 print_log()
1722 return s
1723
1724 def mx_quotient(self, mask = None, weight='median', preserve=True):
1725 """
1726 Form a quotient using "off" beams when observing in "MX" mode.
1727 Parameters:
1728 mask: an optional mask to be used when weight == 'stddev'
1729 weight: How to average the off beams. Default is 'median'.
1730 preserve: you can preserve (default) the continuum or
1731 remove it. The equations used are
1732 preserve: Output = Toff * (on/off) - Toff
1733 remove: Output = Toff * (on/off) - Ton
1734 """
1735 if mask is None: mask = ()
1736 varlist = vars()
1737 on = scantable(self._math._mx_extract(self, 'on'))
1738 preoff = scantable(self._math._mx_extract(self, 'off'))
1739 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1740 from asapmath import quotient
1741 q = quotient(on, off, preserve)
1742 q._add_history("mx_quotient", varlist)
1743 print_log()
1744 return q
1745
1746 def freq_switch(self, insitu=None):
1747 """
1748 Apply frequency switching to the data.
1749 Parameters:
1750 insitu: if False a new scantable is returned.
1751 Otherwise, the switching is done in-situ
1752 The default is taken from .asaprc (False)
1753 Example:
1754 none
1755 """
1756 if insitu is None: insitu = rcParams['insitu']
1757 self._math._setinsitu(insitu)
1758 varlist = vars()
1759 s = scantable(self._math._freqswitch(self))
1760 s._add_history("freq_switch", varlist)
1761 print_log()
1762 if insitu: self._assign(s)
1763 else: return s
1764
1765 def recalc_azel(self):
1766 """
1767 Recalculate the azimuth and elevation for each position.
1768 Parameters:
1769 none
1770 Example:
1771 """
1772 varlist = vars()
1773 self._recalcazel()
1774 self._add_history("recalc_azel", varlist)
1775 print_log()
1776 return
1777
1778 def __add__(self, other):
1779 varlist = vars()
1780 s = None
1781 if isinstance(other, scantable):
1782 s = scantable(self._math._binaryop(self, other, "ADD"))
1783 elif isinstance(other, float):
1784 s = scantable(self._math._unaryop(self, other, "ADD", False))
1785 else:
1786 raise TypeError("Other input is not a scantable or float value")
1787 s._add_history("operator +", varlist)
1788 print_log()
1789 return s
1790
1791 def __sub__(self, other):
1792 """
1793 implicit on all axes and on Tsys
1794 """
1795 varlist = vars()
1796 s = None
1797 if isinstance(other, scantable):
1798 s = scantable(self._math._binaryop(self, other, "SUB"))
1799 elif isinstance(other, float):
1800 s = scantable(self._math._unaryop(self, other, "SUB", False))
1801 else:
1802 raise TypeError("Other input is not a scantable or float value")
1803 s._add_history("operator -", varlist)
1804 print_log()
1805 return s
1806
1807 def __mul__(self, other):
1808 """
1809 implicit on all axes and on Tsys
1810 """
1811 varlist = vars()
1812 s = None
1813 if isinstance(other, scantable):
1814 s = scantable(self._math._binaryop(self, other, "MUL"))
1815 elif isinstance(other, float):
1816 s = scantable(self._math._unaryop(self, other, "MUL", False))
1817 else:
1818 raise TypeError("Other input is not a scantable or float value")
1819 s._add_history("operator *", varlist)
1820 print_log()
1821 return s
1822
1823
1824 def __div__(self, other):
1825 """
1826 implicit on all axes and on Tsys
1827 """
1828 varlist = vars()
1829 s = None
1830 if isinstance(other, scantable):
1831 s = scantable(self._math._binaryop(self, other, "DIV"))
1832 elif isinstance(other, float):
1833 if other == 0.0:
1834 raise ZeroDivisionError("Dividing by zero is not recommended")
1835 s = scantable(self._math._unaryop(self, other, "DIV", False))
1836 else:
1837 raise TypeError("Other input is not a scantable or float value")
1838 s._add_history("operator /", varlist)
1839 print_log()
1840 return s
1841
1842 def get_fit(self, row=0):
1843 """
1844 Print or return the stored fits for a row in the scantable
1845 Parameters:
1846 row: the row which the fit has been applied to.
1847 """
1848 if row > self.nrow():
1849 return
1850 from asap.asapfit import asapfit
1851 fit = asapfit(self._getfit(row))
1852 if rcParams['verbose']:
1853 print fit
1854 return
1855 else:
1856 return fit.as_dict()
1857
1858 def _add_history(self, funcname, parameters):
1859 # create date
1860 sep = "##"
1861 from datetime import datetime
1862 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1863 hist = dstr+sep
1864 hist += funcname+sep#cdate+sep
1865 if parameters.has_key('self'): del parameters['self']
1866 for k, v in parameters.iteritems():
1867 if type(v) is dict:
1868 for k2, v2 in v.iteritems():
1869 hist += k2
1870 hist += "="
1871 if isinstance(v2, scantable):
1872 hist += 'scantable'
1873 elif k2 == 'mask':
1874 if isinstance(v2, list) or isinstance(v2, tuple):
1875 hist += str(self._zip_mask(v2))
1876 else:
1877 hist += str(v2)
1878 else:
1879 hist += str(v2)
1880 else:
1881 hist += k
1882 hist += "="
1883 if isinstance(v, scantable):
1884 hist += 'scantable'
1885 elif k == 'mask':
1886 if isinstance(v, list) or isinstance(v, tuple):
1887 hist += str(self._zip_mask(v))
1888 else:
1889 hist += str(v)
1890 else:
1891 hist += str(v)
1892 hist += sep
1893 hist = hist[:-2] # remove trailing '##'
1894 self._addhistory(hist)
1895
1896
1897 def _zip_mask(self, mask):
1898 mask = list(mask)
1899 i = 0
1900 segments = []
1901 while mask[i:].count(1):
1902 i += mask[i:].index(1)
1903 if mask[i:].count(0):
1904 j = i + mask[i:].index(0)
1905 else:
1906 j = len(mask)
1907 segments.append([i, j])
1908 i = j
1909 return segments
1910
1911 def _get_ordinate_label(self):
1912 fu = "("+self.get_fluxunit()+")"
1913 import re
1914 lbl = "Intensity"
1915 if re.match(".K.", fu):
1916 lbl = "Brightness Temperature "+ fu
1917 elif re.match(".Jy.", fu):
1918 lbl = "Flux density "+ fu
1919 return lbl
1920
1921 def _check_ifs(self):
1922 nchans = [self.nchan(i) for i in range(self.nif(-1))]
1923 nchans = filter(lambda t: t > 0, nchans)
1924 return (sum(nchans)/len(nchans) == nchans[0])
1925
1926 def _fill(self, names, unit, average, getpt):
1927 import os
1928 from asap._asap import stfiller
1929 first = True
1930 fullnames = []
1931 for name in names:
1932 name = os.path.expandvars(name)
1933 name = os.path.expanduser(name)
1934 if not os.path.exists(name):
1935 msg = "File '%s' does not exists" % (name)
1936 if rcParams['verbose']:
1937 asaplog.push(msg)
1938 print asaplog.pop().strip()
1939 return
1940 raise IOError(msg)
1941 fullnames.append(name)
1942 if average:
1943 asaplog.push('Auto averaging integrations')
1944 stype = int(rcParams['scantable.storage'].lower() == 'disk')
1945 for name in fullnames:
1946 tbl = Scantable(stype)
1947 r = stfiller(tbl)
1948 msg = "Importing %s..." % (name)
1949 asaplog.push(msg, False)
1950 print_log()
1951 r._open(name, -1, -1, getpt)
1952 r._read()
1953 #tbl = r._getdata()
1954 if average:
1955 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
1956 #tbl = tbl2
1957 if not first:
1958 tbl = self._math._merge([self, tbl])
1959 #tbl = tbl2
1960 Scantable.__init__(self, tbl)
1961 r._close()
1962 del r, tbl
1963 first = False
1964 if unit is not None:
1965 self.set_fluxunit(unit)
1966 #self.set_freqframe(rcParams['scantable.freqframe'])
1967