source: branches/alma/python/scantable.py@ 1508

Last change on this file since 1508 was 1496, checked in by TakTsutsumi, 16 years ago

New Development: No

JIRA Issue: No

Ready to Release: Yes

Interface Changes: Yes

What Interface Changed: a new boolean, getpt

Test Programs: List test programs

Put in Release Notes: No

Module(s): Module Names change impacts.

Description: Added a boolean, getpt, to init() and _fill() to allow
control over pointing data filling. Also commented out a line that
forces the default frequency frame setting when a scantable is read.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 72.6 KB
1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log
4from asap import asaplog
5from asap import selector
6from asap import linecatalog
7from asap import _n_bools, mask_not, mask_and, mask_or
8
9class scantable(Scantable):
10 """
11 The ASAP container for scans
12 """
13
14 def __init__(self, filename, average=None, unit=None, getpt=None):
15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
25 scantable
26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
32 """
33 if average is None:
34 average = rcParams['scantable.autoaverage']
35 if getpt is None:
36 getpt = False
37 varlist = vars()
38 from asap._asap import stmath
39 self._math = stmath()
40 if isinstance(filename, Scantable):
41 Scantable.__init__(self, filename)
42 else:
43 if isinstance(filename, str):
44 import os.path
45 filename = os.path.expandvars(filename)
46 filename = os.path.expanduser(filename)
47 if not os.path.exists(filename):
48 s = "File '%s' not found." % (filename)
49 if rcParams['verbose']:
50 asaplog.push(s)
51 print asaplog.pop().strip()
52 return
53 raise IOError(s)
54 if os.path.isdir(filename) \
55 and not os.path.exists(filename+'/table.f1'):
56 # crude check if asap table
57 if os.path.exists(filename+'/table.info'):
58 ondisk = rcParams['scantable.storage'] == 'disk'
59 Scantable.__init__(self, filename, ondisk)
60 if unit is not None:
61 self.set_fluxunit(unit)
62 # do not reset to the default freqframe
63 #self.set_freqframe(rcParams['scantable.freqframe'])
64 else:
65 msg = "The given file '%s'is not a valid " \
66 "asap table." % (filename)
67 if rcParams['verbose']:
68 print msg
69 return
70 else:
71 raise IOError(msg)
72 else:
73 self._fill([filename], unit, average, getpt)
74 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
75 and isinstance(filename[-1], str):
76 self._fill(filename, unit, average, getpt)
77 self._add_history("scantable", varlist)
78 print_log()
79
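A minimal usage sketch of the constructor, including the getpt flag added in this revision. The top-level asap import and the file name are assumptions (any rpfits/sdfits/ms file or ASAP table would do):

    from asap import scantable            # standard ASAP session import (assumed)
    # read a file, auto-average integrations within each scan,
    # and skip filling the pointing data (getpt=False)
    scan = scantable('obs.rpf', average=True, unit='K', getpt=False)   # 'obs.rpf' is hypothetical
    print scan                            # uses __str__, i.e. Scantable._summary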
80 def save(self, name=None, format=None, overwrite=False):
81 """
82 Store the scantable on disk. This can be an asap (aips++) Table,
83 SDFITS or MS2 format.
84 Parameters:
85 name: the name of the outputfile. For format "ASCII"
86 this is the root file name (data in 'name'.txt
87 and header in 'name'_header.txt)
88 format: an optional file format. Default is ASAP.
89 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
90 'SDFITS' (save as SDFITS file)
91 'ASCII' (saves as ascii text file)
92 'MS2' (saves as an aips++
93 MeasurementSet V2)
94 overwrite: If the file should be overwritten if it exists.
95 The default False is to return with warning
96 without writing the output. USE WITH CARE.
97 Example:
98 scan.save('myscan.asap')
99 scan.save('myscan.sdfits', 'SDFITS')
100 """
101 from os import path
102 if format is None: format = rcParams['scantable.save']
103 suffix = '.'+format.lower()
104 if name is None or name == "":
105 name = 'scantable'+suffix
106 msg = "No filename given. Using default name %s..." % name
107 asaplog.push(msg)
108 name = path.expandvars(name)
109 if path.isfile(name) or path.isdir(name):
110 if not overwrite:
111 msg = "File %s exists." % name
112 if rcParams['verbose']:
113 print msg
114 return
115 else:
116 raise IOError(msg)
117 format2 = format.upper()
118 if format2 == 'ASAP':
119 self._save(name)
120 else:
121 from asap._asap import stwriter as stw
122 writer = stw(format2)
123 writer.write(self, name)
124 print_log()
125 return
126
127 def copy(self):
128 """
129 Return a copy of this scantable.
130 Note:
131 This makes a full (deep) copy. scan2 = scan1 makes a reference.
132 Parameters:
133 none
134 Example:
135 copiedscan = scan.copy()
136 """
137 sd = scantable(Scantable._copy(self))
138 return sd
139
140 def drop_scan(self, scanid=None):
141 """
142 Return a new scantable where the specified scan number(s) has(have)
143 been dropped.
144 Parameters:
145 scanid: a (list of) scan number(s)
146 """
147 from asap import _is_sequence_or_number as _is_valid
148 from asap import _to_list
149 from asap import unique
150 if not _is_valid(scanid):
151 if rcParams['verbose']:
152 print "Please specify a scanno to drop from the scantable"
153 return
154 else:
155 raise RuntimeError("No scan given")
156 try:
157 scanid = _to_list(scanid)
158 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
159 for sid in scanid: allscans.remove(sid)
160 if len(allscans) == 0:
161 raise ValueError("Can't remove all scans")
162 except ValueError:
163 if rcParams['verbose']:
164 print "Couldn't find any match."
165 return
166 else: raise
167 try:
168 bsel = self.get_selection()
169 sel = selector()
170 sel.set_scans(allscans)
171 self.set_selection(bsel+sel)
172 scopy = self._copy()
173 self.set_selection(bsel)
174 return scantable(scopy)
175 except RuntimeError:
176 if rcParams['verbose']:
177 print "Couldn't find any match."
178 else:
179 raise
180
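A short sketch of drop_scan based on the docstring above; the scan numbers and the scantable variable 'scan' are illustrative:

    # return a new scantable with scans 1 and 3 removed; 'scan' itself is unchanged
    cleaned = scan.drop_scan([1, 3])
    cleaned.summary()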
181
182 def get_scan(self, scanid=None):
183 """
184 Return a specific scan (by scanno) or collection of scans (by
185 source name) in a new scantable.
186 Note:
187 See scantable.drop_scan() for the inverse operation.
188 Parameters:
189 scanid: a (list of) scanno or a source name, unix-style
190 patterns are accepted for source name matching, e.g.
191 '*_R' gets all 'ref' scans
192 Example:
193 # get all scans containing the source '323p459'
194 newscan = scan.get_scan('323p459')
195 # get all 'off' scans
196 refscans = scan.get_scan('*_R')
197 # get a subset of scans by scanno (as listed in scan.summary())
198 newscan = scan.get_scan([0, 2, 7, 10])
199 """
200 if scanid is None:
201 if rcParams['verbose']:
202 print "Please specify a scan no or name to " \
203 "retrieve from the scantable"
204 return
205 else:
206 raise RuntimeError("No scan given")
207
208 try:
209 bsel = self.get_selection()
210 sel = selector()
211 if type(scanid) is str:
212 sel.set_name(scanid)
213 self.set_selection(bsel+sel)
214 scopy = self._copy()
215 self.set_selection(bsel)
216 return scantable(scopy)
217 elif type(scanid) is int:
218 sel.set_scans([scanid])
219 self.set_selection(bsel+sel)
220 scopy = self._copy()
221 self.set_selection(bsel)
222 return scantable(scopy)
223 elif type(scanid) is list:
224 sel.set_scans(scanid)
225 self.set_selection(sel)
226 scopy = self._copy()
227 self.set_selection(bsel)
228 return scantable(scopy)
229 else:
230 msg = "Illegal scanid type, use 'int' or 'list' if ints."
231 if rcParams['verbose']:
232 print msg
233 else:
234 raise TypeError(msg)
235 except RuntimeError:
236 if rcParams['verbose']: print "Couldn't find any match."
237 else: raise
238
239 def __str__(self):
240 return Scantable._summary(self, True)
241
242 def summary(self, filename=None):
243 """
244 Print a summary of the contents of this scantable.
245 Parameters:
246 filename: the name of a file to write the output to
247 Default - no file output
248 verbose: print extra info such as the frequency table
249 The default (False) is taken from .asaprc
250 """
251 info = Scantable._summary(self, True)
252 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
253 if filename is not None:
254 if filename == "":
255 filename = 'scantable_summary.txt'
256 from os.path import expandvars, isdir
257 filename = expandvars(filename)
258 if not isdir(filename):
259 data = open(filename, 'w')
260 data.write(info)
261 data.close()
262 else:
263 msg = "Illegal file name '%s'." % (filename)
264 if rcParams['verbose']:
265 print msg
266 else:
267 raise IOError(msg)
268 if rcParams['verbose']:
269 try:
270 from IPython.genutils import page as pager
271 except ImportError:
272 from pydoc import pager
273 pager(info)
274 else:
275 return info
276
277
278 def get_selection(self):
279 """
280 Get the selection object currently set on this scantable.
281 Parameters:
282 none
283 Example:
284 sel = scan.get_selection()
285 sel.set_ifs(0) # select IF 0
286 scan.set_selection(sel) # apply modified selection
287 """
288 return selector(self._getselection())
289
290 def set_selection(self, selection=selector()):
291 """
292 Select a subset of the data. All following operations on this scantable
293 are only applied to this selection.
294 Parameters:
295 selection: a selector object (default unset the selection)
296 Examples:
297 sel = selector() # create a selection object
298 sel.set_scans([0, 3]) # select SCANNO 0 and 3
299 scan.set_selection(sel) # set the selection
300 scan.summary() # will only print summary of scanno 0 and 3
301 scan.set_selection() # unset the selection
302 """
303 self._setselection(selection)
304
305 def get_row(self, row=0, insitu=None):
306 """
307 Select a row in the scantable.
308 Return a scantable with single row.
309 Parameters:
310 row: row no of integration, default is 0.
311 insitu: if False a new scantable is returned.
312 Otherwise, the scaling is done in-situ
313 The default is taken from .asaprc (False)
314 """
315 if insitu is None: insitu = rcParams['insitu']
316 if not insitu:
317 workscan = self.copy()
318 else:
319 workscan = self
320 # Select a row
321 sel=selector()
322 sel.set_scans([workscan.getscan(row)])
323 sel.set_cycles([workscan.getcycle(row)])
324 sel.set_beams([workscan.getbeam(row)])
325 sel.set_ifs([workscan.getif(row)])
326 sel.set_polarisations([workscan.getpol(row)])
327 sel.set_name(workscan._getsourcename(row))
328 workscan.set_selection(sel)
329 if not workscan.nrow() == 1:
330 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
331 raise RuntimeError(msg)
332 del sel
333 if insitu:
334 self._assign(workscan)
335 else:
336 return workscan
337
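A brief sketch of get_row, assuming an existing scantable named scan with at least six rows:

    # extract integration 5 into a new single-row scantable
    row5 = scan.get_row(row=5, insitu=False)
    print row5.nrow()                     # prints 1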
338 def stats(self, stat='stddev', mask=None):
339 """
340 Determine the specified statistic of the current beam/if/pol
341 Takes a 'mask' as an optional parameter to specify which
342 channels should be excluded.
343 Parameters:
344 stat: 'min', 'max', 'sumsq', 'sum', 'mean'
345 'var', 'stddev', 'avdev', 'rms', 'median'
346 mask: an optional mask specifying where the statistic
347 should be determined.
348 Example:
349 scan.set_unit('channel')
350 msk = scan.create_mask([100, 200], [500, 600])
351 scan.stats(stat='mean', mask=msk)
352 """
353 if mask is None:
354 mask = []
355 axes = ['Beam', 'IF', 'Pol', 'Time']
356 if not self._check_ifs():
357 raise ValueError("Cannot apply mask as the IFs have different "
358 "number of channels. Please use setselection() "
359 "to select individual IFs")
360
361 statvals = self._math._stats(self, mask, stat)
362 out = ''
363 axes = []
364 for i in range(self.nrow()):
365 axis = []
366 axis.append(self.getscan(i))
367 axis.append(self.getbeam(i))
368 axis.append(self.getif(i))
369 axis.append(self.getpol(i))
370 axis.append(self.getcycle(i))
371 axes.append(axis)
372 tm = self._gettime(i)
373 src = self._getsourcename(i)
374 out += 'Scan[%d] (%s) ' % (axis[0], src)
375 out += 'Time[%s]:\n' % (tm)
376 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
377 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
378 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
379 out += '= %3.3f\n' % (statvals[i])
380 out += "--------------------------------------------------\n"
381
382 if rcParams['verbose']:
383 print "--------------------------------------------------"
384 print " ", stat
385 print "--------------------------------------------------"
386 print out
387 #else:
388 #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
389 # 'axes' : axes,
390 # 'data': statvals}
391 return statvals
392
393 def stddev(self, mask=None):
394 """
395 Determine the standard deviation of the current beam/if/pol
396 Takes a 'mask' as an optional parameter to specify which
397 channels should be excluded.
398 Parameters:
399 mask: an optional mask specifying where the standard
400 deviation should be determined.
401
402 Example:
403 scan.set_unit('channel')
404 msk = scan.create_mask([100, 200], [500, 600])
405 scan.stddev(mask=msk)
406 """
407 return self.stats(stat='stddev', mask=mask)
408
409
410 def get_column_names(self):
411 """
412 Return a list of column names, which can be used for selection.
413 """
414 return list(Scantable.get_column_names(self))
415
416 def get_tsys(self):
417 """
418 Return the System temperatures.
419 Returns:
420 a list of Tsys values for the current selection
421 """
422
423 return self._row_callback(self._gettsys, "Tsys")
424
425 def _row_callback(self, callback, label):
426 axes = []
427 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
428 out = ""
429 outvec = []
430 for i in range(self.nrow()):
431 axis = []
432 axis.append(self.getscan(i))
433 axis.append(self.getbeam(i))
434 axis.append(self.getif(i))
435 axis.append(self.getpol(i))
436 axis.append(self.getcycle(i))
437 axes.append(axis)
438 tm = self._gettime(i)
439 src = self._getsourcename(i)
440 out += 'Scan[%d] (%s) ' % (axis[0], src)
441 out += 'Time[%s]:\n' % (tm)
442 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
443 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
444 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
445 outvec.append(callback(i))
446 out += '= %3.3f\n' % (outvec[i])
447 out += "--------------------------------------------------\n"
448 if rcParams['verbose']:
449 print "--------------------------------------------------"
450 print " %s" % (label)
451 print "--------------------------------------------------"
452 print out
453 # disabled because the vector seems more useful
454 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
455 return outvec
456
457 def _get_column(self, callback, row=-1):
458 """
459 """
460 if row == -1:
461 return [callback(i) for i in range(self.nrow())]
462 else:
463 if 0 <= row < self.nrow():
464 return callback(row)
465
466
467 def get_time(self, row=-1, asdatetime=False):
468 """
469 Get a list of time stamps for the observations.
470 Return a string, or a datetime object if asdatetime=True, for each integration time stamp in the scantable.
471 Parameters:
472 row: row no of integration. Default -1 return all rows
473 asdatetime: return values as datetime objects rather than strings
474 Example:
475 none
476 """
477 from time import strptime
478 from datetime import datetime
479 times = self._get_column(self._gettime, row)
480 if not asdatetime:
481 return times
482 format = "%Y/%m/%d/%H:%M:%S"
483 if isinstance(times, list):
484 return [datetime(*strptime(i, format)[:6]) for i in times]
485 else:
486 return datetime(*strptime(times, format)[:6])
487
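A hedged example of get_time with and without asdatetime, assuming a scantable named scan:

    stamps = scan.get_time()                        # list of 'YYYY/MM/DD/HH:MM:SS' strings
    first = scan.get_time(row=0, asdatetime=True)   # a datetime.datetime object
    print first.year, first.month, first.day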
488
489 def get_inttime(self, row=-1):
490 """
491 Get a list of integration times for the observations.
492 Return a time in seconds for each integration in the scantable.
493 Parameters:
494 row: row no of integration. Default -1 return all rows.
495 Example:
496 none
497 """
498 return self._get_column(self._getinttime, row)
499
500
501 def get_sourcename(self, row=-1):
502 """
503 Get a list of source names for the observations.
504 Return a string for each integration in the scantable.
505 Parameters:
506 row: row no of integration. Default -1 return all rows.
507 Example:
508 none
509 """
510 return self._get_column(self._getsourcename, row)
511
512 def get_elevation(self, row=-1):
513 """
514 Get a list of elevations for the observations.
515 Return a float for each integration in the scantable.
516 Parameters:
517 row: row no of integration. Default -1 return all rows.
518 Example:
519 none
520 """
521 return self._get_column(self._getelevation, row)
522
523 def get_azimuth(self, row=-1):
524 """
525 Get a list of azimuths for the observations.
526 Return a float for each integration in the scantable.
527 Parameters:
528 row: row no of integration. Default -1 return all rows.
529 Example:
530 none
531 """
532 return self._get_column(self._getazimuth, row)
533
534 def get_parangle(self, row=-1):
535 """
536 Get a list of parallactic angles for the observations.
537 Return a float for each integration in the scantable.
538 Parameters:
539 row: row no of integration. Default -1 return all rows.
540 Example:
541 none
542 """
543 return self._get_column(self._getparangle, row)
544
545 def get_direction(self, row=-1):
546 """
547 Get a list of Positions on the sky (direction) for the observations.
548 Return the direction for each integration in the scantable.
549 Parameters:
550 row: row no of integration. Default -1 return all rows
551 Example:
552 none
553 """
554 return self._get_column(self._getdirection, row)
555
556 def get_directionval(self, row=-1):
557 """
558 Get a list of Positions on the sky (direction) for the observations.
559 Return the direction values for each integration in the scantable.
560 Parameters:
561 row: row no of integration. Default -1 return all rows
562 Example:
563 none
564 """
565 return self._get_column(self._getdirectionvec, row)
566
567 def set_unit(self, unit='channel'):
568 """
569 Set the unit for all following operations on this scantable
570 Parameters:
571 unit: optional unit, default is 'channel'
572 one of '*Hz', 'km/s', 'channel', ''
573 """
574 varlist = vars()
575 if unit in ['', 'pixel', 'channel']:
576 unit = ''
577 inf = list(self._getcoordinfo())
578 inf[0] = unit
579 self._setcoordinfo(inf)
580 self._add_history("set_unit", varlist)
581
582 def set_instrument(self, instr):
583 """
584 Set the instrument for subsequent processing.
585 Parameters:
586 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
587 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
588 """
589 self._setInstrument(instr)
590 self._add_history("set_instument", vars())
591 print_log()
592
593 def set_feedtype(self, feedtype):
594 """
595 Overwrite the feed type, which might not be set correctly.
596 Parameters:
597 feedtype: 'linear' or 'circular'
598 """
599 self._setfeedtype(feedtype)
600 self._add_history("set_feedtype", vars())
601 print_log()
602
603 def set_doppler(self, doppler='RADIO'):
604 """
605 Set the doppler for all following operations on this scantable.
606 Parameters:
607 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
608 """
609 varlist = vars()
610 inf = list(self._getcoordinfo())
611 inf[2] = doppler
612 self._setcoordinfo(inf)
613 self._add_history("set_doppler", vars())
614 print_log()
615
616 def set_freqframe(self, frame=None):
617 """
618 Set the frame type of the Spectral Axis.
619 Parameters:
620 frame: an optional frame type, default 'LSRK'. Valid frames are:
621 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
622 'GEO', 'GALACTO', 'LGROUP', 'CMB'
623 Examples:
624 scan.set_freqframe('BARY')
625 """
626 if frame is None: frame = rcParams['scantable.freqframe']
627 varlist = vars()
628 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
629 'GEO', 'GALACTO', 'LGROUP', 'CMB']
630
631 if frame in valid:
632 inf = list(self._getcoordinfo())
633 inf[1] = frame
634 self._setcoordinfo(inf)
635 self._add_history("set_freqframe", varlist)
636 else:
637 msg = "Please specify a valid freq type. Valid types are:\n", valid
638 if rcParams['verbose']:
639 print msg
640 else:
641 raise TypeError(msg)
642 print_log()
643
644 def set_dirframe(self, frame=""):
645 """
646 Set the frame type of the Direction on the sky.
647 Parameters:
648 frame: an optional frame type, default ''. Valid frames are:
649 'J2000', 'B1950', 'GALACTIC'
650 Examples:
651 scan.set_dirframe('GALACTIC')
652 """
653 varlist = vars()
654 try:
655 Scantable.set_dirframe(self, frame)
656 except RuntimeError, msg:
657 if rcParams['verbose']:
658 print msg
659 else:
660 raise
661 self._add_history("set_dirframe", varlist)
662
663 def get_unit(self):
664 """
665 Get the default unit set in this scantable
666 Returns:
667 A unit string
668 """
669 inf = self._getcoordinfo()
670 unit = inf[0]
671 if unit == '': unit = 'channel'
672 return unit
673
674 def get_abcissa(self, rowno=0):
675 """
676 Get the abcissa in the current coordinate setup for the currently
677 selected Beam/IF/Pol
678 Parameters:
679 rowno: an optional row number in the scantable. Default is the
680 first row, i.e. rowno=0
681 Returns:
682 The abcissa values and the format string (as a tuple)
683 """
684 abc = self._getabcissa(rowno)
685 lbl = self._getabcissalabel(rowno)
686 print_log()
687 return abc, lbl
688
689 def flag(self, mask=None, unflag=False):
690 """
691 Flag the selected data using an optional channel mask.
692 Parameters:
693 mask: an optional channel mask, created with create_mask. Default
694 (no mask) is all channels.
695 unflag: if True, unflag the data
696 """
697 varlist = vars()
698 if mask is None:
699 mask = []
700 try:
701 self._flag(mask, unflag)
702 except RuntimeError, msg:
703 if rcParams['verbose']:
704 print msg
705 return
706 else: raise
707 self._add_history("flag", varlist)
708
709 def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
710 """
711 Flag the data in 'lag' space by providing a frequency to remove.
712 Flagged data in the scantable gets set to 0.0 before the fft.
713 No taper is applied.
714 Parameters:
715 frequency: the frequency (really a period within the bandwidth)
716 to remove
717 width: the width of the frequency to remove, to remove a
718 range of frequencies around the centre.
719 unit: the frequency unit (default "GHz")
720 Notes:
721 It is recommended to flag edges of the band or strong
722 signals beforehand.
723 """
724 if insitu is None: insitu = rcParams['insitu']
725 self._math._setinsitu(insitu)
726 varlist = vars()
727 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
728 if not base.has_key(unit):
729 raise ValueError("%s is not a valid unit." % unit)
730 try:
731 s = scantable(self._math._lag_flag(self, frequency*base[unit],
732 width*base[unit]))
733 except RuntimeError, msg:
734 if rcParams['verbose']:
735 print msg
736 return
737 else: raise
738 s._add_history("lag_flag", varlist)
739 print_log()
740 if insitu:
741 self._assign(s)
742 else:
743 return s
744
745
746 def create_mask(self, *args, **kwargs):
747 """
748 Compute and return a mask based on [min, max] windows.
749 The specified windows are to be INCLUDED, when the mask is
750 applied.
751 Parameters:
752 [min, max], [min2, max2], ...
753 Pairs of start/end points (inclusive) specifying the regions
754 to be masked
755 invert: optional argument. If specified as True,
756 return an inverted mask, i.e. the regions
757 specified are EXCLUDED
758 row: create the mask using the specified row for
759 unit conversions, default is row=0
760 only necessary if frequency varies over rows.
761 Example:
762 scan.set_unit('channel')
763 a)
764 msk = scan.create_mask([400, 500], [800, 900])
765 # masks everything outside 400 and 500
766 # and 800 and 900 in the unit 'channel'
767
768 b)
769 msk = scan.create_mask([400, 500], [800, 900], invert=True)
770 # masks the regions between 400 and 500
771 # and 800 and 900 in the unit 'channel'
772 c)
773 # mask only channel 400
774 msk = scan.create_mask([400, 400])
775 """
776 row = 0
777 if kwargs.has_key("row"):
778 row = kwargs.get("row")
779 data = self._getabcissa(row)
780 u = self._getcoordinfo()[0]
781 if rcParams['verbose']:
782 if u == "": u = "channel"
783 msg = "The current mask window unit is %s" % u
784 i = self._check_ifs()
785 if not i:
786 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
787 asaplog.push(msg)
788 n = self.nchan()
789 msk = _n_bools(n, False)
790 # test if args is a 'list' or a 'normal *args - UGLY!!!
791
792 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
793 and args or args[0]
794 for window in ws:
795 if (len(window) != 2 or window[0] > window[1] ):
796 raise TypeError("A window needs to be defined as [min, max]")
797 for i in range(n):
798 if data[i] >= window[0] and data[i] <= window[1]:
799 msk[i] = True
800 if kwargs.has_key('invert'):
801 if kwargs.get('invert'):
802 msk = mask_not(msk)
803 print_log()
804 return msk
805
806 def get_masklist(self, mask=None, row=0):
807 """
808 Compute and return a list of mask windows, [min, max].
809 Parameters:
810 mask: channel mask, created with create_mask.
811 row: calculate the masklist using the specified row
812 for unit conversions, default is row=0
813 only necessary if frequency varies over rows.
814 Returns:
815 [min, max], [min2, max2], ...
816 Pairs of start/end points (inclusive) specifying
817 the masked regions
818 """
819 if not (isinstance(mask,list) or isinstance(mask, tuple)):
820 raise TypeError("The mask should be list or tuple.")
821 if len(mask) < 2:
822 raise TypeError("The mask elements should be > 1")
823 if self.nchan() != len(mask):
824 msg = "Number of channels in scantable != number of mask elements"
825 raise TypeError(msg)
826 data = self._getabcissa(row)
827 u = self._getcoordinfo()[0]
828 if rcParams['verbose']:
829 if u == "": u = "channel"
830 msg = "The current mask window unit is %s" % u
831 i = self._check_ifs()
832 if not i:
833 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
834 asaplog.push(msg)
835 masklist=[]
836 ist, ien = None, None
837 ist, ien=self.get_mask_indices(mask)
838 if ist is not None and ien is not None:
839 for i in xrange(len(ist)):
840 range=[data[ist[i]],data[ien[i]]]
841 range.sort()
842 masklist.append([range[0],range[1]])
843 return masklist
844
845 def get_mask_indices(self, mask=None):
846 """
847 Compute and Return lists of mask start indices and mask end indices.
848 Parameters:
849 mask: channel mask, created with create_mask.
850 Returns:
851 List of mask start indices and that of mask end indices,
852 i.e., [istart1,istart2,....], [iend1,iend2,....].
853 """
854 if not (isinstance(mask,list) or isinstance(mask, tuple)):
855 raise TypeError("The mask should be list or tuple.")
856 if len(mask) < 2:
857 raise TypeError("The mask elements should be > 1")
858 istart=[]
859 iend=[]
860 if mask[0]: istart.append(0)
861 for i in range(len(mask)-1):
862 if not mask[i] and mask[i+1]:
863 istart.append(i+1)
864 elif mask[i] and not mask[i+1]:
865 iend.append(i)
866 if mask[len(mask)-1]: iend.append(len(mask)-1)
867 if len(istart) != len(iend):
868 raise RuntimeError("Numbers of mask start != mask end.")
869 for i in range(len(istart)):
870 if istart[i] > iend[i]:
871 raise RuntimeError("Mask start index > mask end index")
872 break
873 return istart,iend
874
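A sketch tying create_mask, get_masklist and get_mask_indices together; the window values are arbitrary and scan is an assumed scantable:

    scan.set_unit('channel')
    msk = scan.create_mask([100, 200], [300, 350])  # True inside the two windows
    print scan.get_masklist(msk)                    # roughly [[100.0, 200.0], [300.0, 350.0]]
    ist, ien = scan.get_mask_indices(msk)
    print ist, ien                                  # start and end channel indices per window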
875# def get_restfreqs(self):
876# """
877# Get the restfrequency(s) stored in this scantable.
878# The return value(s) are always of unit 'Hz'
879# Parameters:
880# none
881# Returns:
882# a list of doubles
883# """
884# return list(self._getrestfreqs())
885
886 def get_restfreqs(self, ids=None):
887 """
888 Get the restfrequency(s) stored in this scantable.
889 The return value(s) are always of unit 'Hz'
890 Parameters:
891 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
892 be retrieved
893 Returns:
894 dictionary containing ids and a list of doubles for each id
895 """
896 if ids is None:
897 rfreqs={}
898 idlist = self.getmolnos()
899 for i in idlist:
900 rfreqs[i]=list(self._getrestfreqs(i))
901 return rfreqs
902 else:
903 if type(ids)==list or type(ids)==tuple:
904 rfreqs={}
905 for i in ids:
906 rfreqs[i]=list(self._getrestfreqs(i))
907 return rfreqs
908 else:
909 return list(self._getrestfreqs(ids))
910 #return list(self._getrestfreqs(ids))
911
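A brief sketch of the two return forms of get_restfreqs, assuming a scantable named scan:

    allfreqs = scan.get_restfreqs()        # dict of {molecule_id: [freq_in_Hz, ...]}
    print allfreqs
    molno = scan.getmolnos()[0]            # pick one MOLECULE_ID actually present
    print scan.get_restfreqs(molno)        # plain list of frequencies in Hz for that id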
912 def set_restfreqs(self, freqs=None, unit='Hz'):
913 """
914 ********NEED TO BE UPDATED begin************
915 Set or replace the specified restfrequency.
916 If the 'freqs' argument holds a scalar,
917 then that rest frequency will be applied to all the selected
918 data. If the 'freqs' argument holds
919 a vector, then it MUST be of equal or smaller length than
920 the number of IFs (and the available restfrequencies will be
921 replaced by this vector). In this case, *all* data have
922 the restfrequency set per IF according
923 to the corresponding value you give in the 'freqs' vector.
924 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
925 IF 1 gets restfreq 2e9.
926 ********NEED TO BE UPDATED end************
927 You can also specify the frequencies via a linecatalog.
928
929 Parameters:
930 freqs: list of rest frequency values or string identifiers
931 unit: unit for rest frequency (default 'Hz')
932
933 Example:
934 # set the given restfrequency for the all currently selected IFs
935 scan.set_restfreqs(freqs=1.4e9)
936 # set multiple restfrequencies to all the selected data
937 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
938 # If the number of IFs in the data is >= 2 the IF0 gets the first
939 # value IF1 the second... NOTE that freqs needs to be
940 # specified in list of list (e.g. [[],[],...] ).
941 scan.set_restfreqs(freqs=[[1.4e9],[1.67e9]])
942 #set the given restfrequency for the whole table (by name)
943 scan.set_restfreqs(freqs="OH1667")
944
945 Note:
946 To do more sophisticated rest frequency setting, e.g. on a
947 source and IF basis, use scantable.set_selection() before using
948 this function.
949 # provided your scantable is called scan
950 selection = selector()
951 selection.set_name("ORION*")
952 selection.set_ifs([1])
953 scan.set_selection(selection)
954 scan.set_restfreqs(freqs=86.6e9)
955
956 """
957 varlist = vars()
958 from asap import linecatalog
959 # simple value
960 if isinstance(freqs, int) or isinstance(freqs, float):
961 # TT mod
962 #self._setrestfreqs(freqs, "",unit)
963 self._setrestfreqs([freqs], [""],unit)
964 # list of values
965 elif isinstance(freqs, list) or isinstance(freqs, tuple):
966 # list values are scalars
967 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
968 self._setrestfreqs(freqs, [""],unit)
969 # list values are tuples, (value, name)
970 elif isinstance(freqs[-1], dict):
971 #sel = selector()
972 #savesel = self._getselection()
973 #iflist = self.getifnos()
974 #for i in xrange(len(freqs)):
975 # sel.set_ifs(iflist[i])
976 # self._setselection(sel)
977 # self._setrestfreqs(freqs[i], "",unit)
978 #self._setselection(savesel)
979 self._setrestfreqs(freqs["value"],
980 freqs["name"], "MHz")
981 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
982 sel = selector()
983 savesel = self._getselection()
984 iflist = self.getifnos()
985 if len(freqs)>len(iflist):
986 raise ValueError("number of elements in list of list exeeds the current IF selections")
987 for i in xrange(len(freqs)):
988 sel.set_ifs(iflist[i])
989 self._setselection(sel)
990 self._setrestfreqs(freqs[i]["value"],
991 freqs[i]["name"], "MHz")
992 self._setselection(savesel)
993 # freqs are to be taken from a linecatalog
994 elif isinstance(freqs, linecatalog):
995 sel = selector()
996 savesel = self._getselection()
iflist = self.getifnos()  # IF numbers to step through for each catalog line
997 for i in xrange(freqs.nrow()):
998 sel.set_ifs(iflist[i])
999 self._setselection(sel)
1000 self._setrestfreqs(freqs.get_frequency(i),
1001 freqs.get_name(i), "MHz")
1002 # ensure that we are not iterating past nIF
1003 if i == self.nif()-1: break
1004 self._setselection(savesel)
1005 else:
1006 return
1007 self._add_history("set_restfreqs", varlist)
1008
1009 def shift_refpix(self, delta):
1010 """
1011 Shift the reference pixel of the Spectral Coordinate by an
1012 integer amount.
1013 Parameters:
1014 delta: the amount to shift by
1015 Note:
1016 Be careful using this with broadband data.
1017 """
1018 Scantable.shift(self, delta)
1019
1020 def history(self, filename=None):
1021 """
1022 Print the history. Optionally to a file.
1023 Parameters:
1024 filename: The name of the file to save the history to.
1025 """
1026 hist = list(self._gethistory())
1027 out = "-"*80
1028 for h in hist:
1029 if h.startswith("---"):
1030 out += "\n"+h
1031 else:
1032 items = h.split("##")
1033 date = items[0]
1034 func = items[1]
1035 items = items[2:]
1036 out += "\n"+date+"\n"
1037 out += "Function: %s\n Parameters:" % (func)
1038 for i in items:
1039 s = i.split("=")
1040 out += "\n %s = %s" % (s[0], s[1])
1041 out += "\n"+"-"*80
1042 if filename is not None:
1043 if filename == "":
1044 filename = 'scantable_history.txt'
1045 import os
1046 filename = os.path.expandvars(os.path.expanduser(filename))
1047 if not os.path.isdir(filename):
1048 data = open(filename, 'w')
1049 data.write(out)
1050 data.close()
1051 else:
1052 msg = "Illegal file name '%s'." % (filename)
1053 if rcParams['verbose']:
1054 print msg
1055 else:
1056 raise IOError(msg)
1057 if rcParams['verbose']:
1058 try:
1059 from IPython.genutils import page as pager
1060 except ImportError:
1061 from pydoc import pager
1062 pager(out)
1063 else:
1064 return out
1065 return
1066 #
1067 # Maths business
1068 #
1069
1070 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1071 """
1072 Return the (time) weighted average of a scan.
1073 Note:
1074 in channels only - align if necessary
1075 Parameters:
1076 mask: an optional mask (only used for 'var' and 'tsys'
1077 weighting)
1078 scanav: True averages each scan separately
1079 False (default) averages all scans together,
1080 weight: Weighting scheme.
1081 'none' (mean no weight)
1082 'var' (1/var(spec) weighted)
1083 'tsys' (1/Tsys**2 weighted)
1084 'tint' (integration time weighted)
1085 'tintsys' (Tint/Tsys**2)
1086 'median' ( median averaging)
1087 The default is 'tint'
1088 align: align the spectra in velocity before averaging. It takes
1089 the time of the first spectrum as reference time.
1090 Example:
1091 # time average the scantable without using a mask
1092 newscan = scan.average_time()
1093 """
1094 varlist = vars()
1095 if weight is None: weight = 'TINT'
1096 if mask is None: mask = ()
1097 if scanav: scanav = "SCAN"
1098 else: scanav = "NONE"
1099 scan = (self, )
1100 try:
1101 if align:
1102 scan = (self.freq_align(insitu=False), )
1103 s = None
1104 if weight.upper() == 'MEDIAN':
1105 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1106 scanav))
1107 else:
1108 s = scantable(self._math._average(scan, mask, weight.upper(),
1109 scanav))
1110 except RuntimeError, msg:
1111 if rcParams['verbose']:
1112 print msg
1113 return
1114 else: raise
1115 s._add_history("average_time", varlist)
1116 print_log()
1117 return s
1118
1119 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1120 """
1121 Return a scan where all spectra are converted to either
1122 Jansky or Kelvin depending upon the flux units of the scan table.
1123 By default the function tries to look the values up internally.
1124 If it can't find them (or if you want to over-ride), you must
1125 specify EITHER jyperk OR eta (and D which it will try to look up
1126 also if you don't set it). jyperk takes precedence if you set both.
1127 Parameters:
1128 jyperk: the Jy / K conversion factor
1129 eta: the aperture efficiency
1130 d: the geometric diameter (metres)
1131 insitu: if False a new scantable is returned.
1132 Otherwise, the scaling is done in-situ
1133 The default is taken from .asaprc (False)
1134 """
1135 if insitu is None: insitu = rcParams['insitu']
1136 self._math._setinsitu(insitu)
1137 varlist = vars()
1138 if jyperk is None: jyperk = -1.0
1139 if d is None: d = -1.0
1140 if eta is None: eta = -1.0
1141 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1142 s._add_history("convert_flux", varlist)
1143 print_log()
1144 if insitu: self._assign(s)
1145 else: return s
1146
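A hedged usage sketch of convert_flux; the efficiency and diameter values are placeholders, and scan is an assumed scantable:

    # let the built-in tables supply the conversion factors, return a new scantable
    converted = scan.convert_flux(insitu=False)
    # or override with an explicit aperture efficiency and dish diameter (placeholder values)
    converted = scan.convert_flux(eta=0.6, d=64.0, insitu=False)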
1147 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1148 """
1149 Return a scan after applying a gain-elevation correction.
1150 The correction can be made via either a polynomial or a
1151 table-based interpolation (and extrapolation if necessary).
1152 You specify polynomial coefficients, an ascii table or neither.
1153 If you specify neither, then a polynomial correction will be made
1154 with built in coefficients known for certain telescopes (an error
1155 will occur if the instrument is not known).
1156 The data and Tsys are *divided* by the scaling factors.
1157 Parameters:
1158 poly: Polynomial coefficients (default None) to compute a
1159 gain-elevation correction as a function of
1160 elevation (in degrees).
1161 filename: The name of an ascii file holding correction factors.
1162 The first row of the ascii file must give the column
1163 names and these MUST include columns
1164 "ELEVATION" (degrees) and "FACTOR" (multiply data
1165 by this) somewhere.
1166 The second row must give the data type of the
1167 column. Use 'R' for Real and 'I' for Integer.
1168 An example file would be
1169 (actual factors are arbitrary) :
1170
1171 TIME ELEVATION FACTOR
1172 R R R
1173 0.1 0 0.8
1174 0.2 20 0.85
1175 0.3 40 0.9
1176 0.4 60 0.85
1177 0.5 80 0.8
1178 0.6 90 0.75
1179 method: Interpolation method when correcting from a table.
1180 Values are "nearest", "linear" (default), "cubic"
1181 and "spline"
1182 insitu: if False a new scantable is returned.
1183 Otherwise, the scaling is done in-situ
1184 The default is taken from .asaprc (False)
1185 """
1186
1187 if insitu is None: insitu = rcParams['insitu']
1188 self._math._setinsitu(insitu)
1189 varlist = vars()
1190 if poly is None:
1191 poly = ()
1192 from os.path import expandvars
1193 filename = expandvars(filename)
1194 s = scantable(self._math._gainel(self, poly, filename, method))
1195 s._add_history("gain_el", varlist)
1196 print_log()
1197 if insitu: self._assign(s)
1198 else: return s
1199
1200 def freq_align(self, reftime=None, method='cubic', insitu=None):
1201 """
1202 Return a scan where all rows have been aligned in frequency/velocity.
1203 The alignment frequency frame (e.g. LSRK) is that set by function
1204 set_freqframe.
1205 Parameters:
1206 reftime: reference time to align at. By default, the time of
1207 the first row of data is used.
1208 method: Interpolation method for regridding the spectra.
1209 Choose from "nearest", "linear", "cubic" (default)
1210 and "spline"
1211 insitu: if False a new scantable is returned.
1212 Otherwise, the scaling is done in-situ
1213 The default is taken from .asaprc (False)
1214 """
1215 if insitu is None: insitu = rcParams["insitu"]
1216 self._math._setinsitu(insitu)
1217 varlist = vars()
1218 if reftime is None: reftime = ""
1219 s = scantable(self._math._freq_align(self, reftime, method))
1220 s._add_history("freq_align", varlist)
1221 print_log()
1222 if insitu: self._assign(s)
1223 else: return s
1224
1225 def opacity(self, tau, insitu=None):
1226 """
1227 Apply an opacity correction. The data
1228 and Tsys are multiplied by the correction factor.
1229 Parameters:
1230 tau: Opacity from which the correction factor is
1231 exp(tau*ZD)
1232 where ZD is the zenith-distance
1233 insitu: if False a new scantable is returned.
1234 Otherwise, the scaling is done in-situ
1235 The default is taken from .asaprc (False)
1236 """
1237 if insitu is None: insitu = rcParams['insitu']
1238 self._math._setinsitu(insitu)
1239 varlist = vars()
1240 s = scantable(self._math._opacity(self, tau))
1241 s._add_history("opacity", varlist)
1242 print_log()
1243 if insitu: self._assign(s)
1244 else: return s
1245
1246 def bin(self, width=5, insitu=None):
1247 """
1248 Return a scan where all spectra have been binned up.
1249 Parameters:
1250 width: The bin width (default=5) in pixels
1251 insitu: if False a new scantable is returned.
1252 Otherwise, the scaling is done in-situ
1253 The default is taken from .asaprc (False)
1254 """
1255 if insitu is None: insitu = rcParams['insitu']
1256 self._math._setinsitu(insitu)
1257 varlist = vars()
1258 s = scantable(self._math._bin(self, width))
1259 s._add_history("bin", varlist)
1260 print_log()
1261 if insitu: self._assign(s)
1262 else: return s
1263
1264
1265 def resample(self, width=5, method='cubic', insitu=None):
1266 """
1267 Return a scan where all spectra have been resampled onto a new channel grid.
1268
1269 Parameters:
1270 width: The bin width (default=5) in pixels
1271 method: Interpolation method used for the resampling.
1272 Values are "nearest", "linear", "cubic" (default)
1273 and "spline"
1274 insitu: if False a new scantable is returned.
1275 Otherwise, the scaling is done in-situ
1276 The default is taken from .asaprc (False)
1277 """
1278 if insitu is None: insitu = rcParams['insitu']
1279 self._math._setinsitu(insitu)
1280 varlist = vars()
1281 s = scantable(self._math._resample(self, method, width))
1282 s._add_history("resample", varlist)
1283 print_log()
1284 if insitu: self._assign(s)
1285 else: return s
1286
1287
1288 def average_pol(self, mask=None, weight='none'):
1289 """
1290 Average the Polarisations together.
1291 Parameters:
1292 mask: An optional mask defining the region, where the
1293 averaging will be applied. The output will have all
1294 specified points masked.
1295 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1296 weighted), or 'tsys' (1/Tsys**2 weighted)
1297 """
1298 varlist = vars()
1299 if mask is None:
1300 mask = ()
1301 s = scantable(self._math._averagepol(self, mask, weight.upper()))
1302 s._add_history("average_pol", varlist)
1303 print_log()
1304 return s
1305
1306 def average_beam(self, mask=None, weight='none'):
1307 """
1308 Average the Beams together.
1309 Parameters:
1310 mask: An optional mask defining the region, where the
1311 averaging will be applied. The output will have all
1312 specified points masked.
1313 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1314 weighted), or 'tsys' (1/Tsys**2 weighted)
1315 """
1316 varlist = vars()
1317 if mask is None:
1318 mask = ()
1319 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1320 s._add_history("average_beam", varlist)
1321 print_log()
1322 return s
1323
1324 def convert_pol(self, poltype=None):
1325 """
1326 Convert the data to a different polarisation type.
1327 Parameters:
1328 poltype: The new polarisation type. Valid types are:
1329 "linear", "stokes" and "circular"
1330 """
1331 varlist = vars()
1332 try:
1333 s = scantable(self._math._convertpol(self, poltype))
1334 except RuntimeError, msg:
1335 if rcParams['verbose']:
1336 print msg
1337 return
1338 else:
1339 raise
1340 s._add_history("convert_pol", varlist)
1341 print_log()
1342 return s
1343
1344 def smooth(self, kernel="hanning", width=5.0, insitu=None):
1345 """
1346 Smooth the spectrum by the specified kernel (conserving flux).
1347 Parameters:
1348 kernel: The type of smoothing kernel. Select from
1349 'hanning' (default), 'gaussian', 'boxcar' and
1350 'rmedian'
1351 width: The width of the kernel in pixels. For hanning this is
1352 ignored, otherwise it defaults to 5 pixels.
1353 For 'gaussian' it is the Full Width Half
1354 Maximum. For 'boxcar' it is the full width.
1355 For 'rmedian' it is the half width.
1356 insitu: if False a new scantable is returned.
1357 Otherwise, the scaling is done in-situ
1358 The default is taken from .asaprc (False)
1359 Example:
1360 none
1361 """
1362 if insitu is None: insitu = rcParams['insitu']
1363 self._math._setinsitu(insitu)
1364 varlist = vars()
1365 s = scantable(self._math._smooth(self, kernel.lower(), width))
1366 s._add_history("smooth", varlist)
1367 print_log()
1368 if insitu: self._assign(s)
1369 else: return s
1370
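Since the smooth docstring leaves its Example section empty, here is an illustrative sketch; kernel and width choices are arbitrary and scan is an assumed scantable:

    # boxcar smoothing with a full width of 9 channels, returning a new scantable
    smoothed = scan.smooth(kernel='boxcar', width=9, insitu=False)
    # hanning smoothing ignores the width argument
    hanned = scan.smooth(kernel='hanning', insitu=False)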
1371
1372 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
1373 """
1374 Return a scan which has been baselined (all rows) by a polynomial.
1375 Parameters:
1376 mask: an optional mask
1377 order: the order of the polynomial (default is 0)
1378 plot: plot the fit and the residual. In this case each
1379 individual fit has to be approved, by typing 'y'
1380 or 'n'
1381 uselin: use linear polynomial fit
1382 insitu: if False a new scantable is returned.
1383 Otherwise, the scaling is done in-situ
1384 The default is taken from .asaprc (False)
1385 Example:
1386 # return a scan baselined by a third order polynomial,
1387 # not using a mask
1388 bscan = scan.poly_baseline(order=3)
1389 """
1390 if insitu is None: insitu = rcParams['insitu']
1391 varlist = vars()
1392 if mask is None:
1393 mask = [True for i in xrange(self.nchan(-1))]
1394 from asap.asapfitter import fitter
1395 try:
1396 f = fitter()
1397 f.set_scan(self, mask)
1398 if uselin:
1399 f.set_function(lpoly=order)
1400 else:
1401 f.set_function(poly=order)
1402 s = f.auto_fit(insitu, plot=plot)
1403 # Save parameters of baseline fits as a class attribute.
1404 # NOTICE: It does not reflect changes in scantable!
1405 self.blpars = f.blpars
1406 s._add_history("poly_baseline", varlist)
1407 print_log()
1408 if insitu: self._assign(s)
1409 else: return s
1410 except RuntimeError:
1411 msg = "The fit failed, possibly because it didn't converge."
1412 if rcParams['verbose']:
1413 print msg
1414 return
1415 else:
1416 raise RuntimeError(msg)
1417
1418
1419 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1420 threshold=3, chan_avg_limit=1, plot=False,
1421 insitu=None):
1422 """
1423 Return a scan which has been baselined (all rows) by a polynomial.
1424 Spectral lines are detected first using linefinder and masked out
1425 to avoid them affecting the baseline solution.
1426
1427 Parameters:
1428 mask: an optional mask retrieved from scantable
1429 edge: an optional number of channels to drop at
1430 the edge of the spectrum. If only one value is
1431 specified, the same number will be dropped from
1432 both sides of the spectrum. Default is to keep
1433 all channels. Nested tuples represent individual
1434 edge selection for different IFs (the number of spectral
1435 channels can differ between IFs)
1436 order: the order of the polynomial (default is 0)
1437 threshold: the threshold used by line finder. It is better to
1438 keep it large as only strong lines affect the
1439 baseline solution.
1440 chan_avg_limit:
1441 a maximum number of consecutive spectral channels to
1442 average during the search for weak and broad lines.
1443 The default is no averaging (and no search for weak
1444 lines). If such lines can affect the fitted baseline
1445 (e.g. a high order polynomial is fitted), increase this
1446 parameter (usually values up to 8 are reasonable). Most
1447 users of this method should find the default value
1448 sufficient.
1449 plot: plot the fit and the residual. In this case each
1450 individual fit has to be approved, by typing 'y'
1451 or 'n'
1452 insitu: if False a new scantable is returned.
1453 Otherwise, the scaling is done in-situ
1454 The default is taken from .asaprc (False)
1455
1456 Example:
1457 scan2=scan.auto_poly_baseline(order=7)
1458 """
1459 if insitu is None: insitu = rcParams['insitu']
1460 varlist = vars()
1461 from asap.asapfitter import fitter
1462 from asap.asaplinefind import linefinder
1463 from asap import _is_sequence_or_number as _is_valid
1464
1465 # check whether edge is set up for each IF individually
1466 individualedge = False;
1467 if len(edge) > 1:
1468 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1469 individualedge = True;
1470
1471 if not _is_valid(edge, int) and not individualedge:
1472 raise ValueError, "Parameter 'edge' has to be an integer or a \
1473 pair of integers specified as a tuple. Nested tuples are allowed \
1474 to make individual selection for different IFs."
1475
1476 curedge = (0, 0)
1477 if individualedge:
1478 for edgepar in edge:
1479 if not _is_valid(edgepar, int):
1480 raise ValueError, "Each element of the 'edge' tuple has \
1481 to be a pair of integers or an integer."
1482 else:
1483 curedge = edge;
1484
1485 # setup fitter
1486 f = fitter()
1487 f.set_function(poly=order)
1488
1489 # setup line finder
1490 fl = linefinder()
1491 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
1492
1493 if not insitu:
1494 workscan = self.copy()
1495 else:
1496 workscan = self
1497
1498 fl.set_scan(workscan)
1499
1500 rows = range(workscan.nrow())
1501 # Save parameters of baseline fits & masklists as a class attribute.
1502 # NOTICE: It does not reflect changes in scantable!
1503 if len(rows) > 0:
1504 self.blpars=[]
1505 self.masklists=[]
1506 asaplog.push("Processing:")
1507 for r in rows:
1508 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1509 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1510 workscan.getpol(r), workscan.getcycle(r))
1511 asaplog.push(msg, False)
1512
1513 # figure out edge parameter
1514 if individualedge:
1515 if workscan.getif(r) >= len(edge):
1516 raise RuntimeError, "Number of edge elements appears to " \
1517 "be less than the number of IFs"
1518 curedge = edge[workscan.getif(r)]
1519
1520 # setup line finder
1521 fl.find_lines(r, mask, curedge)
1522 outmask=fl.get_mask()
1523 f.set_scan(workscan, fl.get_mask())
1524 f.x = workscan._getabcissa(r)
1525 f.y = workscan._getspectrum(r)
1526 f.data = None
1527 f.fit()
1528
1529 # Show mask list
1530 masklist=workscan.get_masklist(fl.get_mask(),row=r)
1531 msg = "mask range: "+str(masklist)
1532 asaplog.push(msg, False)
1533
1534 fpar = f.get_parameters()
1535 if plot:
1536 f.plot(residual=True)
1537 x = raw_input("Accept fit ( [y]/n ): ")
1538 if x.upper() == 'N':
1539 self.blpars.append(None)
1540 self.masklists.append(None)
1541 continue
1542 workscan._setspectrum(f.fitter.getresidual(), r)
1543 self.blpars.append(fpar)
1544 self.masklists.append(masklist)
1545 if plot:
1546 f._p.unmap()
1547 f._p = None
1548 workscan._add_history("auto_poly_baseline", varlist)
1549 if insitu:
1550 self._assign(workscan)
1551 else:
1552 return workscan
1553
1554 def rotate_linpolphase(self, angle):
1555 """
1556 Rotate the phase of the complex polarization O=Q+iU correlation.
1557 This is always done in situ in the raw data. So if you call this
1558 function more than once then each call rotates the phase further.
1559 Parameters:
1560 angle: The angle (degrees) to rotate (add) by.
1561 Examples:
1562 scan.rotate_linpolphase(2.3)
1563 """
1564 varlist = vars()
1565 self._math._rotate_linpolphase(self, angle)
1566 self._add_history("rotate_linpolphase", varlist)
1567 print_log()
1568 return
1569
1570
1571 def rotate_xyphase(self, angle):
1572 """
1573 Rotate the phase of the XY correlation. This is always done in situ
1574 in the data. So if you call this function more than once
1575 then each call rotates the phase further.
1576 Parameters:
1577 angle: The angle (degrees) to rotate (add) by.
1578 Examples:
1579 scan.rotate_xyphase(2.3)
1580 """
1581 varlist = vars()
1582 self._math._rotate_xyphase(self, angle)
1583 self._add_history("rotate_xyphase", varlist)
1584 print_log()
1585 return
1586
1587 def swap_linears(self):
1588 """
1589 Swap the linear polarisations XX and YY, or more precisely the first two
1590 polarisations, as this also works for circulars.
1591 """
1592 varlist = vars()
1593 self._math._swap_linears(self)
1594 self._add_history("swap_linears", varlist)
1595 print_log()
1596 return
1597
1598 def invert_phase(self):
1599 """
1600 Invert the phase of the complex polarisation
1601 """
1602 varlist = vars()
1603 self._math._invert_phase(self)
1604 self._add_history("invert_phase", varlist)
1605 print_log()
1606 return
1607
1608 def add(self, offset, insitu=None):
1609 """
1610 Return a scan where all spectra have the offset added
1611 Parameters:
1612 offset: the offset
1613 insitu: if False a new scantable is returned.
1614 Otherwise, the scaling is done in-situ
1615 The default is taken from .asaprc (False)
1616 """
1617 if insitu is None: insitu = rcParams['insitu']
1618 self._math._setinsitu(insitu)
1619 varlist = vars()
1620 s = scantable(self._math._unaryop(self, offset, "ADD", False))
1621 s._add_history("add", varlist)
1622 print_log()
1623 if insitu:
1624 self._assign(s)
1625 else:
1626 return s
1627
1628 def scale(self, factor, tsys=True, insitu=None):
1629 """
1630 Return a scan where all spectra are scaled by the given 'factor'
1631 Parameters:
1632 factor: the scaling factor
1633 insitu: if False a new scantable is returned.
1634 Otherwise, the scaling is done in-situ
1635 The default is taken from .asaprc (False)
1636 tsys: if True (default) then apply the operation to Tsys
1637 as well as the data
1638 """
1639 if insitu is None: insitu = rcParams['insitu']
1640 self._math._setinsitu(insitu)
1641 varlist = vars()
1642 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1643 s._add_history("scale", varlist)
1644 print_log()
1645 if insitu:
1646 self._assign(s)
1647 else:
1648 return s
1649
1650 def auto_quotient(self, preserve=True, mode='paired'):
1651 """
1652 This function allows one to build quotients automatically.
1653 It assumes the observation to have the same number of
1654 "ons" and "offs"
1655 Parameters:
1656 preserve: you can preserve (default) the continuum or
1657 remove it. The equations used are
1658 preserve: Output = Toff * (on/off) - Toff
1659 remove: Output = Toff * (on/off) - Ton
1660 mode: the on/off detection mode
1661 'paired' (default)
1662 identifies 'off' scans by the
1663 trailing '_R' (Mopra/Parkes) or
1664 '_e'/'_w' (Tid) and matches
1665 on/off pairs from the observing pattern
1666 'time'
1667 finds the closest off in time
1668
1669 """
1670 modes = ["time", "paired"]
1671 if not mode in modes:
1672 msg = "please provide valid mode. Valid modes are %s" % (modes)
1673 raise ValueError(msg)
1674 varlist = vars()
1675 s = None
1676 if mode.lower() == "paired":
1677 basesel = self.get_selection()
1678 sel = selector()+basesel
1679 sel.set_query("SRCTYPE==1")
1680 self.set_selection(sel)
1681 offs = self.copy()
1682 sel.set_query("SRCTYPE==0")
1683 self.set_selection(sel)
1684 ons = self.copy()
1685 s = scantable(self._math._quotient(ons, offs, preserve))
1686 self.set_selection(basesel)
1687 elif mode.lower() == "time":
1688 s = scantable(self._math._auto_quotient(self, mode, preserve))
1689 s._add_history("auto_quotient", varlist)
1690 print_log()
1691 return s
1692
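A sketch of the two auto_quotient modes described above, assuming scan holds matching on/off observations:

    q1 = scan.auto_quotient()                              # 'paired' mode, continuum preserved
    q2 = scan.auto_quotient(preserve=False, mode='time')   # nearest-in-time offs, continuum removed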
1693 def mx_quotient(self, mask = None, weight='median', preserve=True):
1694 """
1695 Form a quotient using "off" beams when observing in "MX" mode.
1696 Parameters:
1697 mask: an optional mask to be used when weight == 'stddev'
1698 weight: How to average the off beams. Default is 'median'.
1699 preserve: you can preserve (default) the continuum or
1700 remove it. The equations used are
1701 preserve: Output = Toff * (on/off) - Toff
1702 remove: Output = Toff * (on/off) - Ton
1703 """
1704 if mask is None: mask = ()
1705 varlist = vars()
1706 on = scantable(self._math._mx_extract(self, 'on'))
1707 preoff = scantable(self._math._mx_extract(self, 'off'))
1708 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1709 from asapmath import quotient
1710 q = quotient(on, off, preserve)
1711 q._add_history("mx_quotient", varlist)
1712 print_log()
1713 return q
1714
1715 def freq_switch(self, insitu=None):
1716 """
1717 Apply frequency switching to the data.
1718 Parameters:
1719 insitu: if False a new scantable is returned.
1720 Otherwise, the switching is done in-situ
1721 The default is taken from .asaprc (False)
1722 Example:
1723 none
1724 """
1725 if insitu is None: insitu = rcParams['insitu']
1726 self._math._setinsitu(insitu)
1727 varlist = vars()
1728 s = scantable(self._math._freqswitch(self))
1729 s._add_history("freq_switch", varlist)
1730 print_log()
1731 if insitu: self._assign(s)
1732 else: return s
1733
1734 def recalc_azel(self):
1735 """
1736 Recalculate the azimuth and elevation for each position.
1737 Parameters:
1738 none
1739 Example:
1740 """
1741 varlist = vars()
1742 self._recalcazel()
1743 self._add_history("recalc_azel", varlist)
1744 print_log()
1745 return
1746
1747 def __add__(self, other):
1748 varlist = vars()
1749 s = None
1750 if isinstance(other, scantable):
1751 s = scantable(self._math._binaryop(self, other, "ADD"))
1752 elif isinstance(other, float):
1753 s = scantable(self._math._unaryop(self, other, "ADD", False))
1754 else:
1755 raise TypeError("Other input is not a scantable or float value")
1756 s._add_history("operator +", varlist)
1757 print_log()
1758 return s
1759
1760 def __sub__(self, other):
1761 """
1762 implicit on all axes and on Tsys
1763 """
1764 varlist = vars()
1765 s = None
1766 if isinstance(other, scantable):
1767 s = scantable(self._math._binaryop(self, other, "SUB"))
1768 elif isinstance(other, float):
1769 s = scantable(self._math._unaryop(self, other, "SUB", False))
1770 else:
1771 raise TypeError("Other input is not a scantable or float value")
1772 s._add_history("operator -", varlist)
1773 print_log()
1774 return s
1775
1776 def __mul__(self, other):
1777 """
1778 implicit on all axes and on Tsys
1779 """
1780 varlist = vars()
1781 s = None
1782 if isinstance(other, scantable):
1783 s = scantable(self._math._binaryop(self, other, "MUL"))
1784 elif isinstance(other, float):
1785 s = scantable(self._math._unaryop(self, other, "MUL", False))
1786 else:
1787 raise TypeError("Other input is not a scantable or float value")
1788 s._add_history("operator *", varlist)
1789 print_log()
1790 return s
1791
1792
1793 def __div__(self, other):
1794 """
1795 implicit on all axes and on Tsys
1796 """
1797 varlist = vars()
1798 s = None
1799 if isinstance(other, scantable):
1800 s = scantable(self._math._binaryop(self, other, "DIV"))
1801 elif isinstance(other, float):
1802 if other == 0.0:
1803 raise ZeroDivisionError("Dividing by zero is not recommended")
1804 s = scantable(self._math._unaryop(self, other, "DIV", False))
1805 else:
1806 raise TypeError("Other input is not a scantable or float value")
1807 s._add_history("operator /", varlist)
1808 print_log()
1809 return s
1810
1811 def get_fit(self, row=0):
1812 """
1813 Print or return the stored fits for a row in the scantable
1814 Parameters:
1815 row: the row which the fit has been applied to.
1816 """
1817 if row > self.nrow():
1818 return
1819 from asap.asapfit import asapfit
1820 fit = asapfit(self._getfit(row))
1821 if rcParams['verbose']:
1822 print fit
1823 return
1824 else:
1825 return fit.as_dict()
1826
1827 def _add_history(self, funcname, parameters):
1828 # create date
1829 sep = "##"
1830 from datetime import datetime
1831 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1832 hist = dstr+sep
1833 hist += funcname+sep#cdate+sep
1834 if parameters.has_key('self'): del parameters['self']
1835 for k, v in parameters.iteritems():
1836 if type(v) is dict:
1837 for k2, v2 in v.iteritems():
1838 hist += k2
1839 hist += "="
1840 if isinstance(v2, scantable):
1841 hist += 'scantable'
1842 elif k2 == 'mask':
1843 if isinstance(v2, list) or isinstance(v2, tuple):
1844 hist += str(self._zip_mask(v2))
1845 else:
1846 hist += str(v2)
1847 else:
1848 hist += str(v2)
1849 else:
1850 hist += k
1851 hist += "="
1852 if isinstance(v, scantable):
1853 hist += 'scantable'
1854 elif k == 'mask':
1855 if isinstance(v, list) or isinstance(v, tuple):
1856 hist += str(self._zip_mask(v))
1857 else:
1858 hist += str(v)
1859 else:
1860 hist += str(v)
1861 hist += sep
1862 hist = hist[:-2] # remove trailing '##'
1863 self._addhistory(hist)
1864
1865
1866 def _zip_mask(self, mask):
1867 mask = list(mask)
1868 i = 0
1869 segments = []
1870 while mask[i:].count(1):
1871 i += mask[i:].index(1)
1872 if mask[i:].count(0):
1873 j = i + mask[i:].index(0)
1874 else:
1875 j = len(mask)
1876 segments.append([i, j])
1877 i = j
1878 return segments
1879
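A small illustration of what the internal _zip_mask helper returns for a boolean mask; the mask values are arbitrary and scan is an assumed scantable instance:

    # runs of True are reported as [start, one-past-end] index pairs
    print scan._zip_mask([False, True, True, False, True])   # -> [[1, 3], [4, 5]]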
1880 def _get_ordinate_label(self):
1881 fu = "("+self.get_fluxunit()+")"
1882 import re
1883 lbl = "Intensity"
1884 if re.match(".K.", fu):
1885 lbl = "Brightness Temperature "+ fu
1886 elif re.match(".Jy.", fu):
1887 lbl = "Flux density "+ fu
1888 return lbl
1889
1890 def _check_ifs(self):
1891 nchans = [self.nchan(i) for i in range(self.nif(-1))]
1892 nchans = filter(lambda t: t > 0, nchans)
1893 return (sum(nchans)/len(nchans) == nchans[0])
1894
1895 def _fill(self, names, unit, average, getpt):
1896 import os
1897 from asap._asap import stfiller
1898 first = True
1899 fullnames = []
1900 for name in names:
1901 name = os.path.expandvars(name)
1902 name = os.path.expanduser(name)
1903 if not os.path.exists(name):
1904 msg = "File '%s' does not exists" % (name)
1905 if rcParams['verbose']:
1906 asaplog.push(msg)
1907 print asaplog.pop().strip()
1908 return
1909 raise IOError(msg)
1910 fullnames.append(name)
1911 if average:
1912 asaplog.push('Auto averaging integrations')
1913 stype = int(rcParams['scantable.storage'].lower() == 'disk')
1914 for name in fullnames:
1915 tbl = Scantable(stype)
1916 r = stfiller(tbl)
1917 msg = "Importing %s..." % (name)
1918 asaplog.push(msg, False)
1919 print_log()
1920 r._open(name, -1, -1, getpt)
1921 r._read()
1922 #tbl = r._getdata()
1923 if average:
1924 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
1925 #tbl = tbl2
1926 if not first:
1927 tbl = self._math._merge([self, tbl])
1928 #tbl = tbl2
1929 Scantable.__init__(self, tbl)
1930 r._close()
1931 del r, tbl
1932 first = False
1933 if unit is not None:
1934 self.set_fluxunit(unit)
1935 #self.set_freqframe(rcParams['scantable.freqframe'])
1936