source: branches/alma/python/scantable.py@ 1449

Last change on this file since 1449 was 1446, checked in by TakTsutsumi, 16 years ago

Merged recent updates (since 2007) from nrao-asap

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 72.4 KB
1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log
4from asap import asaplog
5from asap import selector
6from asap import linecatalog
7from asap import _n_bools, mask_not, mask_and, mask_or
8
9class scantable(Scantable):
10 """
11 The ASAP container for scans
12 """
13
14 def __init__(self, filename, average=None, unit=None):
15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
25 scantable
26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
32 """
33 if average is None:
34 average = rcParams['scantable.autoaverage']
35 varlist = vars()
36 from asap._asap import stmath
37 self._math = stmath()
38 if isinstance(filename, Scantable):
39 Scantable.__init__(self, filename)
40 else:
41 if isinstance(filename, str):
42 import os.path
43 filename = os.path.expandvars(filename)
44 filename = os.path.expanduser(filename)
45 if not os.path.exists(filename):
46 s = "File '%s' not found." % (filename)
47 if rcParams['verbose']:
48 asaplog.push(s)
49 print asaplog.pop().strip()
50 return
51 raise IOError(s)
52 if os.path.isdir(filename) \
53 and not os.path.exists(filename+'/table.f1'):
54 # crude check if asap table
55 if os.path.exists(filename+'/table.info'):
56 ondisk = rcParams['scantable.storage'] == 'disk'
57 Scantable.__init__(self, filename, ondisk)
58 if unit is not None:
59 self.set_fluxunit(unit)
60 self.set_freqframe(rcParams['scantable.freqframe'])
61 else:
62 msg = "The given file '%s'is not a valid " \
63 "asap table." % (filename)
64 if rcParams['verbose']:
65 print msg
66 return
67 else:
68 raise IOError(msg)
69 else:
70 self._fill([filename], unit, average)
71 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
72 and isinstance(filename[-1], str):
73 self._fill(filename, unit, average)
74 self._add_history("scantable", varlist)
75 print_log()
76
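    # Usage sketch: construct a scantable from a raw data file or reopen a
    # saved ASAP table. The file names below are placeholders.
    #   from asap import scantable
    #   s = scantable('mydata.rpf', average=True)      # rpfits/sdfits/ms input
    #   t = scantable('mydata.asap', unit='K')         # existing ASAP table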
77 def save(self, name=None, format=None, overwrite=False):
78 """
79 Store the scantable on disk. This can be an asap (aips++) Table,
80 SDFITS or MS2 format.
81 Parameters:
82 name: the name of the output file. For format "ASCII"
83 this is the root file name (data in 'name'.txt
84 and header in 'name'_header.txt)
85 format: an optional file format. Default is ASAP.
86 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
87 'SDFITS' (save as SDFITS file)
88 'ASCII' (saves as ascii text file)
89 'MS2' (saves as an aips++
90 MeasurementSet V2)
91 overwrite: If the file should be overwritten if it exists.
92 The default False is to return with a warning
93 without writing the output. USE WITH CARE.
94 Example:
95 scan.save('myscan.asap')
96 scan.save('myscan.sdfits', 'SDFITS')
97 """
98 from os import path
99 if format is None: format = rcParams['scantable.save']
100 suffix = '.'+format.lower()
101 if name is None or name == "":
102 name = 'scantable'+suffix
103 msg = "No filename given. Using default name %s..." % name
104 asaplog.push(msg)
105 name = path.expandvars(name)
106 if path.isfile(name) or path.isdir(name):
107 if not overwrite:
108 msg = "File %s exists." % name
109 if rcParams['verbose']:
110 print msg
111 return
112 else:
113 raise IOError(msg)
114 format2 = format.upper()
115 if format2 == 'ASAP':
116 self._save(name)
117 else:
118 from asap._asap import stwriter as stw
119 writer = stw(format2)
120 writer.write(self, name)
121 print_log()
122 return
123
124 def copy(self):
125 """
126 Return a copy of this scantable.
127 Note:
128 This makes a full (deep) copy. scan2 = scan1 makes a reference.
129 Parameters:
130 none
131 Example:
132 copiedscan = scan.copy()
133 """
134 sd = scantable(Scantable._copy(self))
135 return sd
136
137 def drop_scan(self, scanid=None):
138 """
139 Return a new scantable where the specified scan number(s) has(have)
140 been dropped.
141 Parameters:
142 scanid: a (list of) scan number(s)
143 """
144 from asap import _is_sequence_or_number as _is_valid
145 from asap import _to_list
146 from asap import unique
147 if not _is_valid(scanid):
148 if rcParams['verbose']:
149 print "Please specify a scanno to drop from the scantable"
150 return
151 else:
152 raise RuntimeError("No scan given")
153 try:
154 scanid = _to_list(scanid)
155 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
156 for sid in scanid: allscans.remove(sid)
157 if len(allscans) == 0:
158 raise ValueError("Can't remove all scans")
159 except ValueError:
160 if rcParams['verbose']:
161 print "Couldn't find any match."
162 return
163 else: raise
164 try:
165 bsel = self.get_selection()
166 sel = selector()
167 sel.set_scans(allscans)
168 self.set_selection(bsel+sel)
169 scopy = self._copy()
170 self.set_selection(bsel)
171 return scantable(scopy)
172 except RuntimeError:
173 if rcParams['verbose']:
174 print "Couldn't find any match."
175 else:
176 raise
177
178
179 def get_scan(self, scanid=None):
180 """
181 Return a specific scan (by scanno) or collection of scans (by
182 source name) in a new scantable.
183 Note:
184 See scantable.drop_scan() for the inverse operation.
185 Parameters:
186 scanid: a (list of) scanno or a source name, unix-style
187 patterns are accepted for source name matching, e.g.
188 '*_R' gets all 'ref' scans
189 Example:
190 # get all scans containing the source '323p459'
191 newscan = scan.get_scan('323p459')
192 # get all 'off' scans
193 refscans = scan.get_scan('*_R')
194 # get a subset of scans by scanno (as listed in scan.summary())
195 newscan = scan.get_scan([0, 2, 7, 10])
196 """
197 if scanid is None:
198 if rcParams['verbose']:
199 print "Please specify a scan no or name to " \
200 "retrieve from the scantable"
201 return
202 else:
203 raise RuntimeError("No scan given")
204
205 try:
206 bsel = self.get_selection()
207 sel = selector()
208 if type(scanid) is str:
209 sel.set_name(scanid)
210 self.set_selection(bsel+sel)
211 scopy = self._copy()
212 self.set_selection(bsel)
213 return scantable(scopy)
214 elif type(scanid) is int:
215 sel.set_scans([scanid])
216 self.set_selection(bsel+sel)
217 scopy = self._copy()
218 self.set_selection(bsel)
219 return scantable(scopy)
220 elif type(scanid) is list:
221 sel.set_scans(scanid)
222 self.set_selection(sel)
223 scopy = self._copy()
224 self.set_selection(bsel)
225 return scantable(scopy)
226 else:
227 msg = "Illegal scanid type, use 'int' or 'list' of ints."
228 if rcParams['verbose']:
229 print msg
230 else:
231 raise TypeError(msg)
232 except RuntimeError:
233 if rcParams['verbose']: print "Couldn't find any match."
234 else: raise
235
236 def __str__(self):
237 return Scantable._summary(self, True)
238
239 def summary(self, filename=None):
240 """
241 Print a summary of the contents of this scantable.
242 Parameters:
243 filename: the name of a file to write the output to
244 Default - no file output
245 verbose: print extra info such as the frequency table
246 The default (False) is taken from .asaprc
247 """
248 info = Scantable._summary(self, True)
249 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
250 if filename is not None:
251 if filename == "":
252 filename = 'scantable_summary.txt'
253 from os.path import expandvars, isdir
254 filename = expandvars(filename)
255 if not isdir(filename):
256 data = open(filename, 'w')
257 data.write(info)
258 data.close()
259 else:
260 msg = "Illegal file name '%s'." % (filename)
261 if rcParams['verbose']:
262 print msg
263 else:
264 raise IOError(msg)
265 if rcParams['verbose']:
266 try:
267 from IPython.genutils import page as pager
268 except ImportError:
269 from pydoc import pager
270 pager(info)
271 else:
272 return info
273
274
275 def get_selection(self):
276 """
277 Get the selection object currently set on this scantable.
278 Parameters:
279 none
280 Example:
281 sel = scan.get_selection()
282 sel.set_ifs(0) # select IF 0
283 scan.set_selection(sel) # apply modified selection
284 """
285 return selector(self._getselection())
286
287 def set_selection(self, selection=selector()):
288 """
289 Select a subset of the data. All following operations on this scantable
290 are only applied to this selection.
291 Parameters:
292 selection: a selector object (default unset the selection)
293 Examples:
294 sel = selector() # create a selection object
295 sel.set_scans([0, 3]) # select SCANNO 0 and 3
296 scan.set_selection(sel) # set the selection
297 scan.summary() # will only print summary of scanno 0 and 3
298 scan.set_selection() # unset the selection
299 """
300 self._setselection(selection)
301
302 def get_row(self, row=0, insitu=None):
303 """
304 Select a row in the scantable.
305 Return a scantable with single row.
306 Parameters:
307 row: row no of integration, default is 0.
308 insitu: if False a new scantable is returned.
309 Otherwise, the scaling is done in-situ
310 The default is taken from .asaprc (False)
311 """
312 if insitu is None: insitu = rcParams['insitu']
313 if not insitu:
314 workscan = self.copy()
315 else:
316 workscan = self
317 # Select a row
318 sel=selector()
319 sel.set_scans([workscan.getscan(row)])
320 sel.set_cycles([workscan.getcycle(row)])
321 sel.set_beams([workscan.getbeam(row)])
322 sel.set_ifs([workscan.getif(row)])
323 sel.set_polarisations([workscan.getpol(row)])
324 sel.set_name(workscan._getsourcename(row))
325 workscan.set_selection(sel)
326 if not workscan.nrow() == 1:
327 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
328 raise RuntimeError(msg)
329 del sel
330 if insitu:
331 self._assign(workscan)
332 else:
333 return workscan
334
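    # Usage sketch, assuming a scantable instance named 'scan': pull out
    # integration (row) number 5 as its own scantable without touching 'scan'.
    #   row5 = scan.get_row(row=5, insitu=False)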
335 def stats(self, stat='stddev', mask=None):
336 """
337 Determine the specified statistic of the current beam/if/pol
338 Takes a 'mask' as an optional parameter to specify which
339 channels should be excluded.
340 Parameters:
341 stat: 'min', 'max', 'sumsq', 'sum', 'mean'
342 'var', 'stddev', 'avdev', 'rms', 'median'
343 mask: an optional mask specifying where the statistic
344 should be determined.
345 Example:
346 scan.set_unit('channel')
347 msk = scan.create_mask([100, 200], [500, 600])
348 scan.stats(stat='mean', mask=msk)
349 """
350 if mask is None:
351 mask = []
352 axes = ['Beam', 'IF', 'Pol', 'Time']
353 if not self._check_ifs():
354 raise ValueError("Cannot apply mask as the IFs have different "
355 "number of channels. Please use setselection() "
356 "to select individual IFs")
357
358 statvals = self._math._stats(self, mask, stat)
359 out = ''
360 axes = []
361 for i in range(self.nrow()):
362 axis = []
363 axis.append(self.getscan(i))
364 axis.append(self.getbeam(i))
365 axis.append(self.getif(i))
366 axis.append(self.getpol(i))
367 axis.append(self.getcycle(i))
368 axes.append(axis)
369 tm = self._gettime(i)
370 src = self._getsourcename(i)
371 out += 'Scan[%d] (%s) ' % (axis[0], src)
372 out += 'Time[%s]:\n' % (tm)
373 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
374 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
375 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
376 out += '= %3.3f\n' % (statvals[i])
377 out += "--------------------------------------------------\n"
378
379 if rcParams['verbose']:
380 print "--------------------------------------------------"
381 print " ", stat
382 print "--------------------------------------------------"
383 print out
384 #else:
385 #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
386 # 'axes' : axes,
387 # 'data': statvals}
388 return statvals
389
390 def stddev(self, mask=None):
391 """
392 Determine the standard deviation of the current beam/if/pol
393 Takes a 'mask' as an optional parameter to specify which
394 channels should be excluded.
395 Parameters:
396 mask: an optional mask specifying where the standard
397 deviation should be determined.
398
399 Example:
400 scan.set_unit('channel')
401 msk = scan.create_mask([100, 200], [500, 600])
402 scan.stddev(mask=msk)
403 """
404 return self.stats(stat='stddev', mask=mask)
405
406
407 def get_column_names(self):
408 """
409 Return a list of column names, which can be used for selection.
410 """
411 return list(Scantable.get_column_names(self))
412
413 def get_tsys(self):
414 """
415 Return the System temperatures.
416 Returns:
417 a list of Tsys values for the current selection
418 """
419
420 return self._row_callback(self._gettsys, "Tsys")
421
422 def _row_callback(self, callback, label):
423 axes = []
424 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
425 out = ""
426 outvec = []
427 for i in range(self.nrow()):
428 axis = []
429 axis.append(self.getscan(i))
430 axis.append(self.getbeam(i))
431 axis.append(self.getif(i))
432 axis.append(self.getpol(i))
433 axis.append(self.getcycle(i))
434 axes.append(axis)
435 tm = self._gettime(i)
436 src = self._getsourcename(i)
437 out += 'Scan[%d] (%s) ' % (axis[0], src)
438 out += 'Time[%s]:\n' % (tm)
439 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
440 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
441 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
442 outvec.append(callback(i))
443 out += '= %3.3f\n' % (outvec[i])
444 out += "--------------------------------------------------\n"
445 if rcParams['verbose']:
446 print "--------------------------------------------------"
447 print " %s" % (label)
448 print "--------------------------------------------------"
449 print out
450 # disabled because the vector seems more useful
451 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
452 return outvec
453
454 def _get_column(self, callback, row=-1):
455 """
456 """
457 if row == -1:
458 return [callback(i) for i in range(self.nrow())]
459 else:
460 if 0 <= row < self.nrow():
461 return callback(row)
462
463
464 def get_time(self, row=-1, asdatetime=False):
465 """
466 Get a list of time stamps for the observations.
467 Return a datetime object for each integration time stamp in the scantable.
468 Parameters:
469 row: row no of integration. Default -1 return all rows
470 asdatetime: return values as datetime objects rather than strings
471 Example:
472 none
473 """
474 from time import strptime
475 from datetime import datetime
476 times = self._get_column(self._gettime, row)
477 if not asdatetime: return times
478 format = "%Y/%m/%d/%H:%M:%S"
479 if isinstance(times, list):
480 return [datetime(*strptime(i, format)[:6]) for i in times]
481 else:
482 return datetime(*strptime(times, format)[:6])
483
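    # Usage sketch, assuming a scantable 'scan': fetch the time stamps either
    # as strings (default) or as datetime objects.
    #   stamps = scan.get_time()                       # all rows, strings
    #   t0 = scan.get_time(row=0, asdatetime=True)     # first row, datetime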
484
485 def get_inttime(self, row=-1):
486 """
487 Get a list of integration times for the observations.
488 Return a time in seconds for each integration in the scantable.
489 Parameters:
490 row: row no of integration. Default -1 return all rows.
491 Example:
492 none
493 """
494 return self._get_column(self._getinttime, row)
495
496
497 def get_sourcename(self, row=-1):
498 """
499 Get a list of source names for the observations.
500 Return a string for each integration in the scantable.
501 Parameters:
502 row: row no of integration. Default -1 return all rows.
503 Example:
504 none
505 """
506 return self._get_column(self._getsourcename, row)
507
508 def get_elevation(self, row=-1):
509 """
510 Get a list of elevations for the observations.
511 Return a float for each integration in the scantable.
512 Parameters:
513 row: row no of integration. Default -1 return all rows.
514 Example:
515 none
516 """
517 return self._get_column(self._getelevation, row)
518
519 def get_azimuth(self, row=-1):
520 """
521 Get a list of azimuths for the observations.
522 Return a float for each integration in the scantable.
523 Parameters:
524 row: row no of integration. Default -1 return all rows.
525 Example:
526 none
527 """
528 return self._get_column(self._getazimuth, row)
529
530 def get_parangle(self, row=-1):
531 """
532 Get a list of parallactic angles for the observations.
533 Return a float for each integration in the scantable.
534 Parameters:
535 row: row no of integration. Default -1 return all rows.
536 Example:
537 none
538 """
539 return self._get_column(self._getparangle, row)
540
541 def get_direction(self, row=-1):
542 """
543 Get a list of Positions on the sky (direction) for the observations.
544 Return a float for each integration in the scantable.
545 Parameters:
546 row: row no of integration. Default -1 return all rows
547 Example:
548 none
549 """
550 return self._get_column(self._getdirection, row)
551
552 def get_directionval(self, row=-1):
553 """
554 Get a list of Positions on the sky (direction) for the observations.
555 Return a float for each integration in the scantable.
556 Parameters:
557 row: row no of integration. Default -1 return all rows
558 Example:
559 none
560 """
561 return self._get_column(self._getdirectionvec, row)
562
563 def set_unit(self, unit='channel'):
564 """
565 Set the unit for all following operations on this scantable
566 Parameters:
567 unit: optional unit, default is 'channel'
568 one of '*Hz', 'km/s', 'channel', ''
569 """
570 varlist = vars()
571 if unit in ['', 'pixel', 'channel']:
572 unit = ''
573 inf = list(self._getcoordinfo())
574 inf[0] = unit
575 self._setcoordinfo(inf)
576 self._add_history("set_unit", varlist)
577
578 def set_instrument(self, instr):
579 """
580 Set the instrument for subsequent processing.
581 Parameters:
582 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
583 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
584 """
585 self._setInstrument(instr)
586 self._add_history("set_instrument", vars())
587 print_log()
588
589 def set_feedtype(self, feedtype):
590 """
591 Overwrite the feed type, which might not be set correctly.
592 Parameters:
593 feedtype: 'linear' or 'circular'
594 """
595 self._setfeedtype(feedtype)
596 self._add_history("set_feedtype", vars())
597 print_log()
598
599 def set_doppler(self, doppler='RADIO'):
600 """
601 Set the doppler for all following operations on this scantable.
602 Parameters:
603 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
604 """
605 varlist = vars()
606 inf = list(self._getcoordinfo())
607 inf[2] = doppler
608 self._setcoordinfo(inf)
609 self._add_history("set_doppler", vars())
610 print_log()
611
612 def set_freqframe(self, frame=None):
613 """
614 Set the frame type of the Spectral Axis.
615 Parameters:
616 frame: an optional frame type, default 'LSRK'. Valid frames are:
617 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
618 'GEO', 'GALACTO', 'LGROUP', 'CMB'
619 Examples:
620 scan.set_freqframe('BARY')
621 """
622 if frame is None: frame = rcParams['scantable.freqframe']
623 varlist = vars()
624 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
625 'GEO', 'GALACTO', 'LGROUP', 'CMB']
626
627 if frame in valid:
628 inf = list(self._getcoordinfo())
629 inf[1] = frame
630 self._setcoordinfo(inf)
631 self._add_history("set_freqframe", varlist)
632 else:
633 msg = "Please specify a valid freq type. Valid types are:\n%s" % (valid)
634 if rcParams['verbose']:
635 print msg
636 else:
637 raise TypeError(msg)
638 print_log()
639
640 def set_dirframe(self, frame=""):
641 """
642 Set the frame type of the Direction on the sky.
643 Parameters:
644 frame: an optional frame type, default ''. Valid frames are:
645 'J2000', 'B1950', 'GALACTIC'
646 Examples:
647 scan.set_dirframe('GALACTIC')
648 """
649 varlist = vars()
650 try:
651 Scantable.set_dirframe(self, frame)
652 except RuntimeError, msg:
653 if rcParams['verbose']:
654 print msg
655 else:
656 raise
657 self._add_history("set_dirframe", varlist)
658
659 def get_unit(self):
660 """
661 Get the default unit set in this scantable
662 Returns:
663 A unit string
664 """
665 inf = self._getcoordinfo()
666 unit = inf[0]
667 if unit == '': unit = 'channel'
668 return unit
669
670 def get_abcissa(self, rowno=0):
671 """
672 Get the abcissa in the current coordinate setup for the currently
673 selected Beam/IF/Pol
674 Parameters:
675 rowno: an optional row number in the scantable. Default is the
676 first row, i.e. rowno=0
677 Returns:
678 The abcissa values and the format string (as a dictionary)
679 """
680 abc = self._getabcissa(rowno)
681 lbl = self._getabcissalabel(rowno)
682 print_log()
683 return abc, lbl
684
685 def flag(self, mask=None, unflag=False):
686 """
687 Flag the selected data using an optional channel mask.
688 Parameters:
689 mask: an optional channel mask, created with create_mask. Default
690 (no mask) is all channels.
691 unflag: if True, unflag the data
692 """
693 varlist = vars()
694 if mask is None:
695 mask = []
696 try:
697 self._flag(mask, unflag)
698 except RuntimeError, msg:
699 if rcParams['verbose']:
700 print msg
701 return
702 else: raise
703 self._add_history("flag", varlist)
704
705 def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
706 """
707 Flag the data in 'lag' space by providing a frequency to remove.
708 Flagged data in the scantable gets set to 0.0 before the fft.
709 No taper is applied.
710 Parameters:
711 frequency: the frequency (really a period within the bandwidth)
712 to remove
713 width: the width of the frequency to remove, to remove a
714 range of frequencies around the centre.
715 unit: the frequency unit (default "GHz")
716 Notes:
717 It is recommended to flag edges of the band or strong
718 signals beforehand.
719 """
720 if insitu is None: insitu = rcParams['insitu']
721 self._math._setinsitu(insitu)
722 varlist = vars()
723 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
724 if not base.has_key(unit):
725 raise ValueError("%s is not a valid unit." % unit)
726 try:
727 s = scantable(self._math._lag_flag(self, frequency*base[unit],
728 width*base[unit]))
729 except RuntimeError, msg:
730 if rcParams['verbose']:
731 print msg
732 return
733 else: raise
734 s._add_history("lag_flag", varlist)
735 print_log()
736 if insitu:
737 self._assign(s)
738 else:
739 return s
740
741
742 def create_mask(self, *args, **kwargs):
743 """
744 Compute and return a mask based on [min, max] windows.
745 The specified windows are to be INCLUDED, when the mask is
746 applied.
747 Parameters:
748 [min, max], [min2, max2], ...
749 Pairs of start/end points (inclusive) specifying the regions
750 to be masked
751 invert: optional argument. If specified as True,
752 return an inverted mask, i.e. the regions
753 specified are EXCLUDED
754 row: create the mask using the specified row for
755 unit conversions, default is row=0
756 only necessary if frequency varies over rows.
757 Example:
758 scan.set_unit('channel')
759 a)
760 msk = scan.create_mask([400, 500], [800, 900])
761 # masks everything outside 400 and 500
762 # and 800 and 900 in the unit 'channel'
763
764 b)
765 msk = scan.create_mask([400, 500], [800, 900], invert=True)
766 # masks the regions between 400 and 500
767 # and 800 and 900 in the unit 'channel'
768 c)
769 mask only channel 400
770 msk = scan.create_mask([400, 400])
771 """
772 row = 0
773 if kwargs.has_key("row"):
774 row = kwargs.get("row")
775 data = self._getabcissa(row)
776 u = self._getcoordinfo()[0]
777 if rcParams['verbose']:
778 if u == "": u = "channel"
779 msg = "The current mask window unit is %s" % u
780 i = self._check_ifs()
781 if not i:
782 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
783 asaplog.push(msg)
784 n = self.nchan()
785 msk = _n_bools(n, False)
786 # test if args is a 'list' or a 'normal *args - UGLY!!!
787
788 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
789 and args or args[0]
790 for window in ws:
791 if (len(window) != 2 or window[0] > window[1] ):
792 raise TypeError("A window needs to be defined as [min, max]")
793 for i in range(n):
794 if data[i] >= window[0] and data[i] <= window[1]:
795 msk[i] = True
796 if kwargs.has_key('invert'):
797 if kwargs.get('invert'):
798 msk = mask_not(msk)
799 print_log()
800 return msk
801
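    # Usage sketch, assuming a scantable 'scan': build channel masks and
    # combine them with the helpers imported at the top of this module.
    #   scan.set_unit('channel')
    #   msk = scan.create_mask([100, 400], [600, 900])  # include both windows
    #   line_free = mask_not(msk)                       # everything outside them
    #   both = mask_and(msk, scan.create_mask([0, 500]))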
802 def get_masklist(self, mask=None, row=0):
803 """
804 Compute and return a list of mask windows, [min, max].
805 Parameters:
806 mask: channel mask, created with create_mask.
807 row: calculate the masklist using the specified row
808 for unit conversions, default is row=0
809 only necessary if frequency varies over rows.
810 Returns:
811 [min, max], [min2, max2], ...
812 Pairs of start/end points (inclusive) specifying
813 the masked regions
814 """
815 if not (isinstance(mask,list) or isinstance(mask, tuple)):
816 raise TypeError("The mask should be list or tuple.")
817 if len(mask) < 2:
818 raise TypeError("The mask should contain at least 2 elements.")
819 if self.nchan() != len(mask):
820 msg = "Number of channels in scantable != number of mask elements"
821 raise TypeError(msg)
822 data = self._getabcissa(row)
823 u = self._getcoordinfo()[0]
824 if rcParams['verbose']:
825 if u == "": u = "channel"
826 msg = "The current mask window unit is %s" % u
827 i = self._check_ifs()
828 if not i:
829 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
830 asaplog.push(msg)
831 masklist=[]
832 ist, ien = None, None
833 ist, ien=self.get_mask_indices(mask)
834 if ist is not None and ien is not None:
835 for i in xrange(len(ist)):
836 rng = [data[ist[i]], data[ien[i]]]
837 rng.sort()
838 masklist.append([rng[0], rng[1]])
839 return masklist
840
841 def get_mask_indices(self, mask=None):
842 """
843 Compute and Return lists of mask start indices and mask end indices.
844 Parameters:
845 mask: channel mask, created with create_mask.
846 Returns:
847 List of mask start indices and that of mask end indices,
848 i.e., [istart1,istart2,....], [iend1,iend2,....].
849 """
850 if not (isinstance(mask,list) or isinstance(mask, tuple)):
851 raise TypeError("The mask should be list or tuple.")
852 if len(mask) < 2:
853 raise TypeError("The mask should contain at least 2 elements.")
854 istart=[]
855 iend=[]
856 if mask[0]: istart.append(0)
857 for i in range(len(mask)-1):
858 if not mask[i] and mask[i+1]:
859 istart.append(i+1)
860 elif mask[i] and not mask[i+1]:
861 iend.append(i)
862 if mask[len(mask)-1]: iend.append(len(mask)-1)
863 if len(istart) != len(iend):
864 raise RuntimeError("Numbers of mask start != mask end.")
865 for i in range(len(istart)):
866 if istart[i] > iend[i]:
867 raise RuntimeError("Mask start index > mask end index")
868 break
869 return istart,iend
870
871# def get_restfreqs(self):
872# """
873# Get the restfrequency(s) stored in this scantable.
874# The return value(s) are always of unit 'Hz'
875# Parameters:
876# none
877# Returns:
878# a list of doubles
879# """
880# return list(self._getrestfreqs())
881
882 def get_restfreqs(self, ids=None):
883 """
884 Get the restfrequency(s) stored in this scantable.
885 The return value(s) are always of unit 'Hz'
886 Parameters:
887 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
888 be retrieved
889 Returns:
890 dictionary containing ids and a list of doubles for each id
891 """
892 if ids is None:
893 rfreqs={}
894 idlist = self.getmolnos()
895 for i in idlist:
896 rfreqs[i]=list(self._getrestfreqs(i))
897 return rfreqs
898 else:
899 if type(ids)==list or type(ids)==tuple:
900 rfreqs={}
901 for i in ids:
902 rfreqs[i]=list(self._getrestfreqs(i))
903 return rfreqs
904 else:
905 return list(self._getrestfreqs(ids))
906 #return list(self._getrestfreqs(ids))
907
908 def set_restfreqs(self, freqs=None, unit='Hz'):
909 """
910 ********NEED TO BE UPDATED begin************
911 Set or replace the specified restfrequency.
912 If the 'freqs' argument holds a scalar,
913 then that rest frequency will be applied to all the selected
914 data. If the 'freqs' argument holds
915 a vector, then it MUST be of equal or smaller length than
916 the number of IFs (and the available restfrequencies will be
917 replaced by this vector). In this case, *all* data have
918 the restfrequency set per IF according
919 to the corresponding value you give in the 'freqs' vector.
920 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
921 IF 1 gets restfreq 2e9.
922 ********NEED TO BE UPDATED end************
923 You can also specify the frequencies via a linecatalog.
924
925 Parameters:
926 freqs: list of rest frequency values or string identifiers
927 unit: unit for rest frequency (default 'Hz')
928
929 Example:
930 # set the given restfrequency for the all currently selected IFs
931 scan.set_restfreqs(freqs=1.4e9)
932 # set multiple restfrequencies to all the selected data
933 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
934 # If the number of IFs in the data is >= 2 the IF0 gets the first
935 # value IF1 the second... NOTE that freqs needs to be
936 # specified in list of list (e.g. [[],[],...] ).
937 scan.set_restfreqs(freqs=[[1.4e9],[1.67e9]])
938 #set the given restfrequency for the whole table (by name)
939 scan.set_restfreqs(freqs="OH1667")
940
941 Note:
942 To do more sophisticated rest-frequency setting, e.g. on a
943 source and IF basis, use scantable.set_selection() before using
944 this function.
945 # provided your scantable is called scan
946 selection = selector()
947 selection.set_name("ORION*")
948 selection.set_ifs([1])
949 scan.set_selection(selection)
950 scan.set_restfreqs(freqs=86.6e9)
951
952 """
953 varlist = vars()
954 from asap import linecatalog
955 # simple value
956 if isinstance(freqs, int) or isinstance(freqs, float):
957 # TT mod
958 #self._setrestfreqs(freqs, "",unit)
959 self._setrestfreqs([freqs], [""],unit)
960 # list of values
961 elif isinstance(freqs, list) or isinstance(freqs, tuple):
962 # list values are scalars
963 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
964 self._setrestfreqs(freqs, [""],unit)
965 # list values are tuples, (value, name)
966 elif isinstance(freqs[-1], dict):
967 #sel = selector()
968 #savesel = self._getselection()
969 #iflist = self.getifnos()
970 #for i in xrange(len(freqs)):
971 # sel.set_ifs(iflist[i])
972 # self._setselection(sel)
973 # self._setrestfreqs(freqs[i], "",unit)
974 #self._setselection(savesel)
975 self._setrestfreqs(freqs["value"],
976 freqs["name"], "MHz")
977 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
978 sel = selector()
979 savesel = self._getselection()
980 iflist = self.getifnos()
981 if len(freqs)>len(iflist):
982 raise ValueError("number of elements in list of lists exceeds the current IF selections")
983 for i in xrange(len(freqs)):
984 sel.set_ifs(iflist[i])
985 self._setselection(sel)
986 self._setrestfreqs(freqs[i]["value"],
987 freqs[i]["name"], "MHz")
988 self._setselection(savesel)
989 # freqs are to be taken from a linecatalog
990 elif isinstance(freqs, linecatalog):
991 sel = selector()
992 savesel = self._getselection(); iflist = self.getifnos()  # iflist used in the loop below
993 for i in xrange(freqs.nrow()):
994 sel.set_ifs(iflist[i])
995 self._setselection(sel)
996 self._setrestfreqs(freqs.get_frequency(i),
997 freqs.get_name(i), "MHz")
998 # ensure that we are not iterating past nIF
999 if i == self.nif()-1: break
1000 self._setselection(savesel)
1001 else:
1002 return
1003 self._add_history("set_restfreqs", varlist)
1004
1005 def shift_refpix(self, delta):
1006 """
1007 Shift the reference pixel of the Spectra Coordinate by an
1008 integer amount.
1009 Parameters:
1010 delta: the amount to shift by
1011 Note:
1012 Be careful using this with broadband data.
1013 """
1014 Scantable.shift(self, delta)
1015
1016 def history(self, filename=None):
1017 """
1018 Print the history. Optionally to a file.
1019 Parameters:
1020 filename: The name of the file to save the history to.
1021 """
1022 hist = list(self._gethistory())
1023 out = "-"*80
1024 for h in hist:
1025 if h.startswith("---"):
1026 out += "\n"+h
1027 else:
1028 items = h.split("##")
1029 date = items[0]
1030 func = items[1]
1031 items = items[2:]
1032 out += "\n"+date+"\n"
1033 out += "Function: %s\n Parameters:" % (func)
1034 for i in items:
1035 s = i.split("=")
1036 out += "\n %s = %s" % (s[0], s[1])
1037 out += "\n"+"-"*80
1038 if filename is not None:
1039 if filename == "":
1040 filename = 'scantable_history.txt'
1041 import os
1042 filename = os.path.expandvars(os.path.expanduser(filename))
1043 if not os.path.isdir(filename):
1044 data = open(filename, 'w')
1045 data.write(out)
1046 data.close()
1047 else:
1048 msg = "Illegal file name '%s'." % (filename)
1049 if rcParams['verbose']:
1050 print msg
1051 else:
1052 raise IOError(msg)
1053 if rcParams['verbose']:
1054 try:
1055 from IPython.genutils import page as pager
1056 except ImportError:
1057 from pydoc import pager
1058 pager(out)
1059 else:
1060 return out
1061 return
1062 #
1063 # Maths business
1064 #
1065
1066 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1067 """
1068 Return the (time) weighted average of a scan.
1069 Note:
1070 in channels only - align if necessary
1071 Parameters:
1072 mask: an optional mask (only used for 'var' and 'tsys'
1073 weighting)
1074 scanav: True averages each scan separately
1075 False (default) averages all scans together,
1076 weight: Weighting scheme.
1077 'none' (mean no weight)
1078 'var' (1/var(spec) weighted)
1079 'tsys' (1/Tsys**2 weighted)
1080 'tint' (integration time weighted)
1081 'tintsys' (Tint/Tsys**2)
1082 'median' ( median averaging)
1083 The default is 'tint'
1084 align: align the spectra in velocity before averaging. It takes
1085 the time of the first spectrum as reference time.
1086 Example:
1087 # time average the scantable without using a mask
1088 newscan = scan.average_time()
1089 """
1090 varlist = vars()
1091 if weight is None: weight = 'TINT'
1092 if mask is None: mask = ()
1093 if scanav: scanav = "SCAN"
1094 else: scanav = "NONE"
1095 scan = (self, )
1096 try:
1097 if align:
1098 scan = (self.freq_align(insitu=False), )
1099 s = None
1100 if weight.upper() == 'MEDIAN':
1101 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1102 scanav))
1103 else:
1104 s = scantable(self._math._average(scan, mask, weight.upper(),
1105 scanav))
1106 except RuntimeError, msg:
1107 if rcParams['verbose']:
1108 print msg
1109 return
1110 else: raise
1111 s._add_history("average_time", varlist)
1112 print_log()
1113 return s
1114
1115 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1116 """
1117 Return a scan where all spectra are converted to either
1118 Jansky or Kelvin depending upon the flux units of the scan table.
1119 By default the function tries to look the values up internally.
1120 If it can't find them (or if you want to over-ride), you must
1121 specify EITHER jyperk OR eta (and D which it will try to look up
1122 also if you don't set it). jyperk takes precedence if you set both.
1123 Parameters:
1124 jyperk: the Jy / K conversion factor
1125 eta: the aperture efficiency
1126 d: the geometric diameter (metres)
1127 insitu: if False a new scantable is returned.
1128 Otherwise, the scaling is done in-situ
1129 The default is taken from .asaprc (False)
1130 """
1131 if insitu is None: insitu = rcParams['insitu']
1132 self._math._setinsitu(insitu)
1133 varlist = vars()
1134 if jyperk is None: jyperk = -1.0
1135 if d is None: d = -1.0
1136 if eta is None: eta = -1.0
1137 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1138 s._add_history("convert_flux", varlist)
1139 print_log()
1140 if insitu: self._assign(s)
1141 else: return s
1142
1143 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1144 """
1145 Return a scan after applying a gain-elevation correction.
1146 The correction can be made via either a polynomial or a
1147 table-based interpolation (and extrapolation if necessary).
1148 You specify polynomial coefficients, an ascii table or neither.
1149 If you specify neither, then a polynomial correction will be made
1150 with built in coefficients known for certain telescopes (an error
1151 will occur if the instrument is not known).
1152 The data and Tsys are *divided* by the scaling factors.
1153 Parameters:
1154 poly: Polynomial coefficients (default None) to compute a
1155 gain-elevation correction as a function of
1156 elevation (in degrees).
1157 filename: The name of an ascii file holding correction factors.
1158 The first row of the ascii file must give the column
1159 names and these MUST include columns
1160 "ELEVATION" (degrees) and "FACTOR" (multiply data
1161 by this) somewhere.
1162 The second row must give the data type of the
1163 column. Use 'R' for Real and 'I' for Integer.
1164 An example file would be
1165 (actual factors are arbitrary) :
1166
1167 TIME ELEVATION FACTOR
1168 R R R
1169 0.1 0 0.8
1170 0.2 20 0.85
1171 0.3 40 0.9
1172 0.4 60 0.85
1173 0.5 80 0.8
1174 0.6 90 0.75
1175 method: Interpolation method when correcting from a table.
1176 Values are "nearest", "linear" (default), "cubic"
1177 and "spline"
1178 insitu: if False a new scantable is returned.
1179 Otherwise, the scaling is done in-situ
1180 The default is taken from .asaprc (False)
1181 """
1182
1183 if insitu is None: insitu = rcParams['insitu']
1184 self._math._setinsitu(insitu)
1185 varlist = vars()
1186 if poly is None:
1187 poly = ()
1188 from os.path import expandvars
1189 filename = expandvars(filename)
1190 s = scantable(self._math._gainel(self, poly, filename, method))
1191 s._add_history("gain_el", varlist)
1192 print_log()
1193 if insitu: self._assign(s)
1194 else: return s
1195
1196 def freq_align(self, reftime=None, method='cubic', insitu=None):
1197 """
1198 Return a scan where all rows have been aligned in frequency/velocity.
1199 The alignment frequency frame (e.g. LSRK) is that set by function
1200 set_freqframe.
1201 Parameters:
1202 reftime: reference time to align at. By default, the time of
1203 the first row of data is used.
1204 method: Interpolation method for regridding the spectra.
1205 Choose from "nearest", "linear", "cubic" (default)
1206 and "spline"
1207 insitu: if False a new scantable is returned.
1208 Otherwise, the scaling is done in-situ
1209 The default is taken from .asaprc (False)
1210 """
1211 if insitu is None: insitu = rcParams["insitu"]
1212 self._math._setinsitu(insitu)
1213 varlist = vars()
1214 if reftime is None: reftime = ""
1215 s = scantable(self._math._freq_align(self, reftime, method))
1216 s._add_history("freq_align", varlist)
1217 print_log()
1218 if insitu: self._assign(s)
1219 else: return s
1220
1221 def opacity(self, tau, insitu=None):
1222 """
1223 Apply an opacity correction. The data
1224 and Tsys are multiplied by the correction factor.
1225 Parameters:
1226 tau: Opacity from which the correction factor is
1227 exp(tau*ZD)
1228 where ZD is the zenith-distance
1229 insitu: if False a new scantable is returned.
1230 Otherwise, the scaling is done in-situ
1231 The default is taken from .asaprc (False)
1232 """
1233 if insitu is None: insitu = rcParams['insitu']
1234 self._math._setinsitu(insitu)
1235 varlist = vars()
1236 s = scantable(self._math._opacity(self, tau))
1237 s._add_history("opacity", varlist)
1238 print_log()
1239 if insitu: self._assign(s)
1240 else: return s
1241
1242 def bin(self, width=5, insitu=None):
1243 """
1244 Return a scan where all spectra have been binned up.
1245 Parameters:
1246 width: The bin width (default=5) in pixels
1247 insitu: if False a new scantable is returned.
1248 Otherwise, the scaling is done in-situ
1249 The default is taken from .asaprc (False)
1250 """
1251 if insitu is None: insitu = rcParams['insitu']
1252 self._math._setinsitu(insitu)
1253 varlist = vars()
1254 s = scantable(self._math._bin(self, width))
1255 s._add_history("bin", varlist)
1256 print_log()
1257 if insitu: self._assign(s)
1258 else: return s
1259
1260
1261 def resample(self, width=5, method='cubic', insitu=None):
1262 """
1263 Return a scan where all spectra have been resampled onto a new grid.
1264
1265 Parameters:
1266 width: The bin width (default=5) in pixels
1267 method: Interpolation method when correcting from a table.
1268 Values are "nearest", "linear", "cubic" (default)
1269 and "spline"
1270 insitu: if False a new scantable is returned.
1271 Otherwise, the scaling is done in-situ
1272 The default is taken from .asaprc (False)
1273 """
1274 if insitu is None: insitu = rcParams['insitu']
1275 self._math._setinsitu(insitu)
1276 varlist = vars()
1277 s = scantable(self._math._resample(self, method, width))
1278 s._add_history("resample", varlist)
1279 print_log()
1280 if insitu: self._assign(s)
1281 else: return s
1282
1283
1284 def average_pol(self, mask=None, weight='none'):
1285 """
1286 Average the Polarisations together.
1287 Parameters:
1288 mask: An optional mask defining the region, where the
1289 averaging will be applied. The output will have all
1290 specified points masked.
1291 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1292 weighted), or 'tsys' (1/Tsys**2 weighted)
1293 """
1294 varlist = vars()
1295 if mask is None:
1296 mask = ()
1297 s = scantable(self._math._averagepol(self, mask, weight.upper()))
1298 s._add_history("average_pol", varlist)
1299 print_log()
1300 return s
1301
1302 def average_beam(self, mask=None, weight='none'):
1303 """
1304 Average the Beams together.
1305 Parameters:
1306 mask: An optional mask defining the region, where the
1307 averaging will be applied. The output will have all
1308 specified points masked.
1309 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1310 weighted), or 'tsys' (1/Tsys**2 weighted)
1311 """
1312 varlist = vars()
1313 if mask is None:
1314 mask = ()
1315 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1316 s._add_history("average_beam", varlist)
1317 print_log()
1318 return s
1319
1320 def convert_pol(self, poltype=None):
1321 """
1322 Convert the data to a different polarisation type.
1323 Parameters:
1324 poltype: The new polarisation type. Valid types are:
1325 "linear", "stokes" and "circular"
1326 """
1327 varlist = vars()
1328 try:
1329 s = scantable(self._math._convertpol(self, poltype))
1330 except RuntimeError, msg:
1331 if rcParams['verbose']:
1332 print msg
1333 return
1334 else:
1335 raise
1336 s._add_history("convert_pol", varlist)
1337 print_log()
1338 return s
1339
1340 def smooth(self, kernel="hanning", width=5.0, insitu=None):
1341 """
1342 Smooth the spectrum by the specified kernel (conserving flux).
1343 Parameters:
1344 kernel: The type of smoothing kernel. Select from
1345 'hanning' (default), 'gaussian', 'boxcar' and
1346 'rmedian'
1347 width: The width of the kernel in pixels. For hanning this is
1348 ignored, otherwise it defaults to 5 pixels.
1349 For 'gaussian' it is the Full Width Half
1350 Maximum. For 'boxcar' it is the full width.
1351 For 'rmedian' it is the half width.
1352 insitu: if False a new scantable is returned.
1353 Otherwise, the scaling is done in-situ
1354 The default is taken from .asaprc (False)
1355 Example:
1356 none
1357 """
1358 if insitu is None: insitu = rcParams['insitu']
1359 self._math._setinsitu(insitu)
1360 varlist = vars()
1361 s = scantable(self._math._smooth(self, kernel.lower(), width))
1362 s._add_history("smooth", varlist)
1363 print_log()
1364 if insitu: self._assign(s)
1365 else: return s
1366
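    # Usage sketch, assuming a scantable 'scan': Gaussian smoothing with a
    # 10-channel FWHM, returning a new scantable rather than modifying 'scan'.
    #   smoothed = scan.smooth(kernel='gaussian', width=10.0, insitu=False)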
1367
1368 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
1369 """
1370 Return a scan which has been baselined (all rows) by a polynomial.
1371 Parameters:
1372 mask: an optional mask
1373 order: the order of the polynomial (default is 0)
1374 plot: plot the fit and the residual. In this case each
1375 individual fit has to be approved by typing 'y'
1376 or 'n'
1377 uselin: use linear polynomial fit
1378 insitu: if False a new scantable is returned.
1379 Otherwise, the scaling is done in-situ
1380 The default is taken from .asaprc (False)
1381 Example:
1382 # return a scan baselined by a third order polynomial,
1383 # not using a mask
1384 bscan = scan.poly_baseline(order=3)
1385 """
1386 if insitu is None: insitu = rcParams['insitu']
1387 varlist = vars()
1388 if mask is None:
1389 mask = [True for i in xrange(self.nchan(-1))]
1390 from asap.asapfitter import fitter
1391 try:
1392 f = fitter()
1393 f.set_scan(self, mask)
1394 if uselin:
1395 f.set_function(lpoly=order)
1396 else:
1397 f.set_function(poly=order)
1398 s = f.auto_fit(insitu, plot=plot)
1399 # Save parameters of baseline fits as a class attribute.
1400 # NOTICE: It does not reflect changes in scantable!
1401 self.blpars = f.blpars
1402 s._add_history("poly_baseline", varlist)
1403 print_log()
1404 if insitu: self._assign(s)
1405 else: return s
1406 except RuntimeError:
1407 msg = "The fit failed, possibly because it didn't converge."
1408 if rcParams['verbose']:
1409 print msg
1410 return
1411 else:
1412 raise RuntimeError(msg)
1413
1414
1415 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1416 threshold=3, chan_avg_limit=1, plot=False,
1417 insitu=None):
1418 """
1419 Return a scan which has been baselined (all rows) by a polynomial.
1420 Spectral lines are detected first using linefinder and masked out
1421 to avoid them affecting the baseline solution.
1422
1423 Parameters:
1424 mask: an optional mask retrieved from scantable
1425 edge: an optional number of channels to drop at
1426 the edge of spectrum. If only one value is
1427 specified, the same number will be dropped from
1428 both sides of the spectrum. Default is to keep
1429 all channels. Nested tuples represent individual
1430 edge selection for different IFs (the number of spectral
1431 channels may differ between IFs)
1432 order: the order of the polynomial (default is 0)
1433 threshold: the threshold used by line finder. It is better to
1434 keep it large as only strong lines affect the
1435 baseline solution.
1436 chan_avg_limit:
1437 a maximum number of consecutive spectral channels to
1438 average during the search of weak and broad lines.
1439 The default is no averaging (and no search for weak
1440 lines). If such lines can affect the fitted baseline
1441 (e.g. a high order polynomial is fitted), increase this
1442 parameter (usually values up to 8 are reasonable). Most
1443 users of this method should find the default value
1444 sufficient.
1445 plot: plot the fit and the residual. In this case each
1446 individual fit has to be approved by typing 'y'
1447 or 'n'
1448 insitu: if False a new scantable is returned.
1449 Otherwise, the scaling is done in-situ
1450 The default is taken from .asaprc (False)
1451
1452 Example:
1453 scan2=scan.auto_poly_baseline(order=7)
1454 """
1455 if insitu is None: insitu = rcParams['insitu']
1456 varlist = vars()
1457 from asap.asapfitter import fitter
1458 from asap.asaplinefind import linefinder
1459 from asap import _is_sequence_or_number as _is_valid
1460
1461 # check whether edge is set up for each IF individually
1462 individualedge = False;
1463 if len(edge) > 1:
1464 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1465 individualedge = True;
1466
1467 if not _is_valid(edge, int) and not individualedge:
1468 raise ValueError, "Parameter 'edge' has to be an integer or a \
1469 pair of integers specified as a tuple. Nested tuples are allowed \
1470 to make individual selection for different IFs."
1471
1472 curedge = (0, 0)
1473 if individualedge:
1474 for edgepar in edge:
1475 if not _is_valid(edgepar, int):
1476 raise ValueError, "Each element of the 'edge' tuple has \
1477 to be a pair of integers or an integer."
1478 else:
1479 curedge = edge;
1480
1481 # setup fitter
1482 f = fitter()
1483 f.set_function(poly=order)
1484
1485 # setup line finder
1486 fl = linefinder()
1487 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
1488
1489 if not insitu:
1490 workscan = self.copy()
1491 else:
1492 workscan = self
1493
1494 fl.set_scan(workscan)
1495
1496 rows = range(workscan.nrow())
1497 # Save parameters of baseline fits & masklists as a class attribute.
1498 # NOTICE: It does not reflect changes in scantable!
1499 if len(rows) > 0:
1500 self.blpars=[]
1501 self.masklists=[]
1502 asaplog.push("Processing:")
1503 for r in rows:
1504 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1505 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1506 workscan.getpol(r), workscan.getcycle(r))
1507 asaplog.push(msg, False)
1508
1509 # figure out edge parameter
1510 if individualedge:
1511 if len(edge) <= workscan.getif(r):
1512 raise RuntimeError, "Number of edge elements appears to " \
1513 "be less than the number of IFs"
1514 curedge = edge[workscan.getif(r)]
1515
1516 # setup line finder
1517 fl.find_lines(r, mask, curedge)
1518 outmask=fl.get_mask()
1519 f.set_scan(workscan, fl.get_mask())
1520 f.x = workscan._getabcissa(r)
1521 f.y = workscan._getspectrum(r)
1522 f.data = None
1523 f.fit()
1524
1525 # Show mask list
1526 masklist=workscan.get_masklist(fl.get_mask(),row=r)
1527 msg = "mask range: "+str(masklist)
1528 asaplog.push(msg, False)
1529
1530 fpar = f.get_parameters()
1531 if plot:
1532 f.plot(residual=True)
1533 x = raw_input("Accept fit ( [y]/n ): ")
1534 if x.upper() == 'N':
1535 self.blpars.append(None)
1536 self.masklists.append(None)
1537 continue
1538 workscan._setspectrum(f.fitter.getresidual(), r)
1539 self.blpars.append(fpar)
1540 self.masklists.append(masklist)
1541 if plot:
1542 f._p.unmap()
1543 f._p = None
1544 workscan._add_history("auto_poly_baseline", varlist)
1545 if insitu:
1546 self._assign(workscan)
1547 else:
1548 return workscan
1549
1550 def rotate_linpolphase(self, angle):
1551 """
1552 Rotate the phase of the complex polarization O=Q+iU correlation.
1553 This is always done in situ in the raw data. So if you call this
1554 function more than once then each call rotates the phase further.
1555 Parameters:
1556 angle: The angle (degrees) to rotate (add) by.
1557 Examples:
1558 scan.rotate_linpolphase(2.3)
1559 """
1560 varlist = vars()
1561 self._math._rotate_linpolphase(self, angle)
1562 self._add_history("rotate_linpolphase", varlist)
1563 print_log()
1564 return
1565
1566
1567 def rotate_xyphase(self, angle):
1568 """
1569 Rotate the phase of the XY correlation. This is always done in situ
1570 in the data. So if you call this function more than once
1571 then each call rotates the phase further.
1572 Parameters:
1573 angle: The angle (degrees) to rotate (add) by.
1574 Examples:
1575 scan.rotate_xyphase(2.3)
1576 """
1577 varlist = vars()
1578 self._math._rotate_xyphase(self, angle)
1579 self._add_history("rotate_xyphase", varlist)
1580 print_log()
1581 return
1582
1583 def swap_linears(self):
1584 """
1585 Swap the linear polarisations XX and YY, or better the first two
1586 polarisations, as this also works for circulars.
1587 """
1588 varlist = vars()
1589 self._math._swap_linears(self)
1590 self._add_history("swap_linears", varlist)
1591 print_log()
1592 return
1593
1594 def invert_phase(self):
1595 """
1596 Invert the phase of the complex polarisation
1597 """
1598 varlist = vars()
1599 self._math._invert_phase(self)
1600 self._add_history("invert_phase", varlist)
1601 print_log()
1602 return
1603
1604 def add(self, offset, insitu=None):
1605 """
1606 Return a scan where all spectra have the offset added
1607 Parameters:
1608 offset: the offset
1609 insitu: if False a new scantable is returned.
1610 Otherwise, the scaling is done in-situ
1611 The default is taken from .asaprc (False)
1612 """
1613 if insitu is None: insitu = rcParams['insitu']
1614 self._math._setinsitu(insitu)
1615 varlist = vars()
1616 s = scantable(self._math._unaryop(self, offset, "ADD", False))
1617 s._add_history("add", varlist)
1618 print_log()
1619 if insitu:
1620 self._assign(s)
1621 else:
1622 return s
1623
1624 def scale(self, factor, tsys=True, insitu=None):
1625 """
1626 Return a scan where all spectra are scaled by the given 'factor'
1627 Parameters:
1628 factor: the scaling factor
1629 insitu: if False a new scantable is returned.
1630 Otherwise, the scaling is done in-situ
1631 The default is taken from .asaprc (False)
1632 tsys: if True (default) then apply the operation to Tsys
1633 as well as the data
1634 """
1635 if insitu is None: insitu = rcParams['insitu']
1636 self._math._setinsitu(insitu)
1637 varlist = vars()
1638 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1639 s._add_history("scale", varlist)
1640 print_log()
1641 if insitu:
1642 self._assign(s)
1643 else:
1644 return s
1645
1646 def auto_quotient(self, preserve=True, mode='paired'):
1647 """
1648 This function allows you to build quotients automatically.
1649 It assumes the observation to have the same number of
1650 "ons" and "offs"
1651 Parameters:
1652 preserve: you can preserve (default) the continuum or
1653 remove it. The equations used are
1654 preserve: Output = Toff * (on/off) - Toff
1655 remove: Output = Toff * (on/off) - Ton
1656 mode: the on/off detection mode
1657 'paired' (default)
1658 identifies 'off' scans by the
1659 trailing '_R' (Mopra/Parkes) or
1660 '_e'/'_w' (Tid) and matches
1661 on/off pairs from the observing pattern
1662 'time'
1663 finds the closest off in time
1664
1665 """
1666 modes = ["time", "paired"]
1667 if not mode in modes:
1668 msg = "Please provide a valid mode. Valid modes are %s" % (modes)
1669 raise ValueError(msg)
1670 varlist = vars()
1671 s = None
1672 if mode.lower() == "paired":
1673 basesel = self.get_selection()
1674 sel = selector()+basesel
1675 sel.set_query("SRCTYPE==1")
1676 self.set_selection(sel)
1677 offs = self.copy()
1678 sel.set_query("SRCTYPE==0")
1679 self.set_selection(sel)
1680 ons = self.copy()
1681 s = scantable(self._math._quotient(ons, offs, preserve))
1682 self.set_selection(basesel)
1683 elif mode.lower() == "time":
1684 s = scantable(self._math._auto_quotient(self, mode, preserve))
1685 s._add_history("auto_quotient", varlist)
1686 print_log()
1687 return s
1688
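    # Usage sketch, assuming a scantable 'scan' containing matched on/off
    # (e.g. '_R') scans: form the quotient spectra, then time-average them.
    #   q = scan.auto_quotient(preserve=True, mode='paired')
    #   avg = q.average_time(weight='tintsys')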
1689 def mx_quotient(self, mask = None, weight='median', preserve=True):
1690 """
1691 Form a quotient using "off" beams when observing in "MX" mode.
1692 Parameters:
1693 mask: an optional mask to be used when weight == 'stddev'
1694 weight: How to average the off beams. Default is 'median'.
1695 preserve: you can preserve (default) the continuum or
1696 remove it. The equations used are
1697 preserve: Output = Toff * (on/off) - Toff
1698 remove: Output = Toff * (on/off) - Ton
1699 """
1700 if mask is None: mask = ()
1701 varlist = vars()
1702 on = scantable(self._math._mx_extract(self, 'on'))
1703 preoff = scantable(self._math._mx_extract(self, 'off'))
1704 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1705 from asapmath import quotient
1706 q = quotient(on, off, preserve)
1707 q._add_history("mx_quotient", varlist)
1708 print_log()
1709 return q
1710
1711 def freq_switch(self, insitu=None):
1712 """
1713 Apply frequency switching to the data.
1714 Parameters:
1715 insitu: if False a new scantable is returned.
1716 Otherwise, the switching is done in-situ
1717 The default is taken from .asaprc (False)
1718 Example:
1719 none
1720 """
1721 if insitu is None: insitu = rcParams['insitu']
1722 self._math._setinsitu(insitu)
1723 varlist = vars()
1724 s = scantable(self._math._freqswitch(self))
1725 s._add_history("freq_switch", varlist)
1726 print_log()
1727 if insitu: self._assign(s)
1728 else: return s
1729
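    # Usage sketch, assuming a frequency-switched scantable 'scan':
    #   fs = scan.freq_switch(insitu=False)    # keep 'scan' unchanged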
1730 def recalc_azel(self):
1731 """
1732 Recalculate the azimuth and elevation for each position.
1733 Parameters:
1734 none
1735 Example:
1736 """
1737 varlist = vars()
1738 self._recalcazel()
1739 self._add_history("recalc_azel", varlist)
1740 print_log()
1741 return
1742
1743 def __add__(self, other):
1744 varlist = vars()
1745 s = None
1746 if isinstance(other, scantable):
1747 s = scantable(self._math._binaryop(self, other, "ADD"))
1748 elif isinstance(other, float):
1749 s = scantable(self._math._unaryop(self, other, "ADD", False))
1750 else:
1751 raise TypeError("Other input is not a scantable or float value")
1752 s._add_history("operator +", varlist)
1753 print_log()
1754 return s
1755
1756 def __sub__(self, other):
1757 """
1758 implicit on all axes and on Tsys
1759 """
1760 varlist = vars()
1761 s = None
1762 if isinstance(other, scantable):
1763 s = scantable(self._math._binaryop(self, other, "SUB"))
1764 elif isinstance(other, float):
1765 s = scantable(self._math._unaryop(self, other, "SUB", False))
1766 else:
1767 raise TypeError("Other input is not a scantable or float value")
1768 s._add_history("operator -", varlist)
1769 print_log()
1770 return s
1771
1772 def __mul__(self, other):
1773 """
1774 implicit on all axes and on Tsys
1775 """
1776 varlist = vars()
1777 s = None
1778 if isinstance(other, scantable):
1779 s = scantable(self._math._binaryop(self, other, "MUL"))
1780 elif isinstance(other, float):
1781 s = scantable(self._math._unaryop(self, other, "MUL", False))
1782 else:
1783 raise TypeError("Other input is not a scantable or float value")
1784 s._add_history("operator *", varlist)
1785 print_log()
1786 return s
1787
1788
1789 def __div__(self, other):
1790 """
1791 implicit on all axes and on Tsys
1792 """
1793 varlist = vars()
1794 s = None
1795 if isinstance(other, scantable):
1796 s = scantable(self._math._binaryop(self, other, "DIV"))
1797 elif isinstance(other, float):
1798 if other == 0.0:
1799 raise ZeroDivisionError("Dividing by zero is not recommended")
1800 s = scantable(self._math._unaryop(self, other, "DIV", False))
1801 else:
1802 raise TypeError("Other input is not a scantable or float value")
1803 s._add_history("operator /", varlist)
1804 print_log()
1805 return s
1806
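    # Usage sketch for the operators above, assuming scantables 'on' and 'off'
    # with matching shapes:
    #   diff = on - off          # row-wise difference of two scantables
    #   scaled = diff * 2.0      # multiply all spectra by a scalar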
1807 def get_fit(self, row=0):
1808 """
1809 Print or return the stored fits for a row in the scantable
1810 Parameters:
1811 row: the row which the fit has been applied to.
1812 """
1813 if row >= self.nrow():
1814 return
1815 from asap.asapfit import asapfit
1816 fit = asapfit(self._getfit(row))
1817 if rcParams['verbose']:
1818 print fit
1819 return
1820 else:
1821 return fit.as_dict()
1822
1823 def _add_history(self, funcname, parameters):
1824 # create date
1825 sep = "##"
1826 from datetime import datetime
1827 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1828 hist = dstr+sep
1829 hist += funcname+sep#cdate+sep
1830 if parameters.has_key('self'): del parameters['self']
1831 for k, v in parameters.iteritems():
1832 if type(v) is dict:
1833 for k2, v2 in v.iteritems():
1834 hist += k2
1835 hist += "="
1836 if isinstance(v2, scantable):
1837 hist += 'scantable'
1838 elif k2 == 'mask':
1839 if isinstance(v2, list) or isinstance(v2, tuple):
1840 hist += str(self._zip_mask(v2))
1841 else:
1842 hist += str(v2)
1843 else:
1844 hist += str(v2)
1845 else:
1846 hist += k
1847 hist += "="
1848 if isinstance(v, scantable):
1849 hist += 'scantable'
1850 elif k == 'mask':
1851 if isinstance(v, list) or isinstance(v, tuple):
1852 hist += str(self._zip_mask(v))
1853 else:
1854 hist += str(v)
1855 else:
1856 hist += str(v)
1857 hist += sep
1858 hist = hist[:-2] # remove trailing '##'
1859 self._addhistory(hist)
1860
1861
1862 def _zip_mask(self, mask):
1863 mask = list(mask)
1864 i = 0
1865 segments = []
1866 while mask[i:].count(1):
1867 i += mask[i:].index(1)
1868 if mask[i:].count(0):
1869 j = i + mask[i:].index(0)
1870 else:
1871 j = len(mask)
1872 segments.append([i, j])
1873 i = j
1874 return segments
1875
1876 def _get_ordinate_label(self):
1877 fu = "("+self.get_fluxunit()+")"
1878 import re
1879 lbl = "Intensity"
1880 if re.match(".K.", fu):
1881 lbl = "Brightness Temperature "+ fu
1882 elif re.match(".Jy.", fu):
1883 lbl = "Flux density "+ fu
1884 return lbl
1885
1886 def _check_ifs(self):
1887 nchans = [self.nchan(i) for i in range(self.nif(-1))]
1888 nchans = filter(lambda t: t > 0, nchans)
1889 return (sum(nchans)/len(nchans) == nchans[0])
1890
1891 def _fill(self, names, unit, average):
1892 import os
1893 from asap._asap import stfiller
1894 first = True
1895 fullnames = []
1896 for name in names:
1897 name = os.path.expandvars(name)
1898 name = os.path.expanduser(name)
1899 if not os.path.exists(name):
1900 msg = "File '%s' does not exist" % (name)
1901 if rcParams['verbose']:
1902 asaplog.push(msg)
1903 print asaplog.pop().strip()
1904 return
1905 raise IOError(msg)
1906 fullnames.append(name)
1907 if average:
1908 asaplog.push('Auto averaging integrations')
1909 stype = int(rcParams['scantable.storage'].lower() == 'disk')
1910 for name in fullnames:
1911 tbl = Scantable(stype)
1912 r = stfiller(tbl)
1913 msg = "Importing %s..." % (name)
1914 asaplog.push(msg, False)
1915 print_log()
1916 r._open(name, -1, -1)
1917 r._read()
1918 #tbl = r._getdata()
1919 if average:
1920 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
1921 #tbl = tbl2
1922 if not first:
1923 tbl = self._math._merge([self, tbl])
1924 #tbl = tbl2
1925 Scantable.__init__(self, tbl)
1926 r._close()
1927 del r, tbl
1928 first = False
1929 if unit is not None:
1930 self.set_fluxunit(unit)
1931 #self.set_freqframe(rcParams['scantable.freqframe'])
1932