Changes in trunk/python [3112:2704]
- Location: trunk/python
- Files: 1 deleted, 13 edited
trunk/python/CMakeLists.txt (r3112 vs r2704)
- In the list of installed python modules, r3112 installs ${PYTHONDIR}/plotter2.py (line 37), while in r2704 that entry is commented out (# ${PYTHONDIR}/plotter2.py). The neighbouring entries (opacity.py, parameters.py, sbseparator.py, scantable.py) are unchanged.
trunk/python/__init__.py (r3112 vs r2704)
- plotter2 import: r3112 imports it only when running inside CASA,
      if is_casapy():
          from plotter2 import plotter2
  whereas in r2704 the import is simply commented out (#from plotter2 import plotter2).
- Version information: r3112 sets
      __date__ = get_asap_revdate()
      __version__ = '4.0.0a'
  while r2704 relies on SVN keyword expansion and a generic label:
      __date__ = '$Date$'.split()[1]
      __version__ = 'trunk'
  __revision__ = get_revision() is common to both revisions.
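For reference, the r2704 date handling relies on SVN keyword expansion: only after the repository substitutes the Date keyword does splitting on whitespace yield the date as the second token. A minimal sketch (the expanded string is an invented example, not a value from this repository):

    # Invented example of what the SVN $Date$ keyword looks like after expansion.
    expanded = "$Date: 2012-09-18 05:04:03 +0000 (Tue, 18 Sep 2012) $"

    # r2704-style parsing: token 0 is '$Date:', token 1 is the ISO date.
    date_part = expanded.split()[1]
    print(date_part)   # -> 2012-09-18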
trunk/python/asapfitter.py (r3112 vs r2704)
- In the per-row fit setup, r3112 adds a guard that r2704 does not have:
      if self.data is not None:
          if self.data._getflagrow(row):
              raise RuntimeError,"Can not fit flagged row."
          self.x = self.data._getabcissa(row)
          self.y = self.data._getspectrum(row)
  r2704 fetches the abcissa and spectrum without checking the row flag first.
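The same guard pattern can be shown outside ASAP: check a per-row flag before handing the row's data to a fitter. The class and attribute names below are stand-ins for illustration, not the scantable API:

    class FitInput(object):
        # Toy data holder: one abcissa/spectrum/flag per row (illustration only).
        def __init__(self, abcissa, spectrum, row_flags):
            self.abcissa = abcissa
            self.spectrum = spectrum
            self.row_flags = row_flags

    def select_row_for_fit(data, row):
        # Mirror of the r3112 behaviour: refuse to fit a row whose row flag is set.
        if data.row_flags[row]:
            raise RuntimeError("Can not fit flagged row.")
        return data.abcissa[row], data.spectrum[row]

    d = FitInput([[0, 1, 2]], [[1.0, 2.0, 1.0]], [False])
    x, y = select_row_for_fit(d, 0)   # succeeds; a True flag would raise RuntimeError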
trunk/python/asapgrid.py (r3112 vs r2704)
- In the image display code, the first two entries of the imshow extent (the horizontal limits) are swapped. r3112 has
      extent=[self.blc[0]-0.5*self.cellx,
              self.trc[0]+0.5*self.cellx,
              self.blc[1]-0.5*self.celly,
              self.trc[1]+0.5*self.celly]
  while r2704 puts the trc term first,
      extent=[self.trc[0]+0.5*self.cellx,
              self.blc[0]-0.5*self.cellx,
              self.blc[1]-0.5*self.celly,
              self.trc[1]+0.5*self.celly]
  so the direction of the horizontal axis of the displayed map is reversed between the two revisions.
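The practical effect of swapping the first two extent entries can be reproduced with plain matplotlib: extent=[left, right, bottom, top] is given in data coordinates, so exchanging left and right reverses the horizontal axis of the displayed image. A small self-contained sketch (array values and cell sizes are arbitrary):

    import numpy as np
    import matplotlib
    matplotlib.use("Agg")          # render off-screen for a non-interactive test
    import matplotlib.pyplot as plt

    data = np.arange(12).reshape(3, 4)
    blc, trc, cell = (10.0, -30.0), (14.0, -27.0), (1.0, 1.0)

    fig, (ax1, ax2) = plt.subplots(1, 2)
    # r3112-style extent: x runs from blc (left) to trc (right)
    ax1.imshow(data, extent=[blc[0] - 0.5 * cell[0], trc[0] + 0.5 * cell[0],
                             blc[1] - 0.5 * cell[1], trc[1] + 0.5 * cell[1]])
    # r2704-style extent: first two entries swapped, so the x axis is reversed
    ax2.imshow(data, extent=[trc[0] + 0.5 * cell[0], blc[0] - 0.5 * cell[0],
                             blc[1] - 0.5 * cell[1], trc[1] + 0.5 * cell[1]])
    print(ax1.get_xlim(), ax2.get_xlim())   # the limits come out in opposite order
    fig.savefig("extent_demo.png")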
trunk/python/asaplotbase.py (r3112 vs r2704)
- Imports: r3112 uses "from numpy import sqrt, ceil"; r2704 imports only sqrt.
- r3112 defines a helper that r2704 lacks:
      def _subplotsOk(self, rows, cols, npanel=0):
          """
          Check if the axes in subplots are actually the ones plotted on
          the figure. Returns a bool.
          This method is to detect manual layout changes using mpl methods.
          """
  The method compares the requested rows/cols with self.rows/self.cols, checks that the number of axes on the figure equals rows*cols, that npanel does not exceed it, that len(self.subplots) matches the number of figure axes, and finally that each stored subplot axes instance is the same object as the corresponding entry of self.figure.get_axes().
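The idea behind the _subplotsOk() helper added in r3112 can be illustrated with a stand-alone matplotlib snippet: keep your own list of axes and compare it with figure.get_axes() to detect that panels were added or replaced behind the plotter's back. This is a sketch of the concept only, not the ASAP implementation:

    import matplotlib
    matplotlib.use("Agg")
    import matplotlib.pyplot as plt

    def subplots_ok(figure, tracked_axes, rows, cols):
        # True only if the figure still shows exactly the axes we created, in order.
        figaxes = figure.get_axes()
        if len(figaxes) != rows * cols or len(tracked_axes) != len(figaxes):
            return False
        # identity comparison: the tracked axes must be the very objects on the figure
        return all(a is b for a, b in zip(tracked_axes, figaxes))

    fig = plt.figure()
    axes = [fig.add_subplot(2, 2, i + 1) for i in range(4)]
    print(subplots_ok(fig, axes, 2, 2))   # True
    fig.add_subplot(3, 3, 9)              # a manual layout change adds a fifth axes
    print(subplots_ok(fig, axes, 2, 2))   # False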
trunk/python/asapmath.py (r3112 vs r2704)
- r3112 adds "import re" at the top of the module.
- In three places r2704 returns the misspelled name scabtab after quitting the progress plotter; r3112 returns scantab. There is also a purely cosmetic re-wrap of the stm._new_average(...) call in average_time().
- merge(): r3112 is defined as merge(*args, **kwargs) and accepts a freq_tol keyword (frequency tolerance for merging IFs, either a numeric value in Hz or a string such as '1MHz'); a value without a recognised unit gets 'Hz' appended,
      if len(freq_tol) > 0 and re.match('.+[GMk]Hz$', freq_tol) is None:
          freq_tol += 'Hz'
  and the tolerance is passed on as stm._merge(lst, freq_tol). r2704 is merge(*args) and calls stm._merge(lst) without a tolerance.
- splitant(): r3112 adds a getpt argument (whether to import the direction from MS/POINTING, default True), notes that the frequency reference frame is imported as it is in the Measurement set, obtains the table tool via "from taskinit import gentools" and builds each per-antenna scantable directly with
      scan=scantable(filename,average=False,antenna=int(antid),getpt=getpt)
  r2704 instead uses "from casac import casac" and tb = casac.table(), selects the rows of each antenna with a query ('ANTENNA1 == %s && ANTENNA2 == %s') into a temporary table asapmath.splitant.tmp, builds the scantable from that temporary table and removes it afterwards with os.system('rm -rf ...').
- _array2dOp(): r3112 adds a skip_flaggedrow argument (skip the operation for row-flagged spectra) and passes it to stm._arrayop / stm._unaryop; r2704 has no such argument.
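The unit handling added to merge() in r3112 is a one-line regular expression check and can be exercised on its own. A small sketch of the same normalisation outside the scantable machinery (the function name is ours; the regex is the one in the diff):

    import re

    def normalize_freq_tol(freq_tol):
        # Append 'Hz' unless the string already ends in GHz, MHz or kHz (as in r3112's merge()).
        freq_tol = str(freq_tol)
        if len(freq_tol) > 0 and re.match('.+[GMk]Hz$', freq_tol) is None:
            freq_tol += 'Hz'
        return freq_tol

    print(normalize_freq_tol(1.0e6))    # -> '1000000.0Hz'
    print(normalize_freq_tol('1MHz'))   # -> '1MHz' (already carries a unit)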
trunk/python/asapplotter.py (r3112 vs r2704)
- r3112 brackets _new_custombar() and the casabar check with a TODO note (these methods should probably live in the backend-dependent classes) and retitles a section comment to "### Forwards to methods in matplotlib axes ###".
- When a new scantable is set, the log message in r3112 reads "The masks, data selections, and labels are reset."; in r2704 it reads "The masks and data selections are reset."
- Layout setting (where self._rows and self._cols are assigned): r3112 additionally calls self._reset_counters() so that multi-page plotting restarts from the first page; r2704 does not.
- set_mask(): r3112 guards with "if (mask is not None) and len(mask):"; r2704 checks only len(mask), which fails when mask is None.
- _reset(): r3112 resets the selection, the mask, the offset, the header and the labels (stack label map, title, ordinate, abcissa); r2704 resets only the user mask, the offset, the selection and the header, and the order of _reset_header() and the _panelrows reset differs in the page-counter reset.
- Page handling: when drawing a page other than the first, r3112 clears the axes only if self._plotter._subplotsOk(self._rows, self._cols, n) confirms the subplot layout is still the one the plotter created; r2704 always clears the axes.
- r3112 adds _get_date_axis_setup(dates, axlim=None), which returns a date-axis title string, a DateFormatter and major/minor tick locators chosen from the time span of the data (>1 year, >1 day, 9.6 h - 1 day, 2 h - 9.6 h, 10 min - 2 h, <10 min).
- plotazel(): r3112 masks rows that are entirely flagged (via _is_all_chan_flagged) in the elevation and azimuth arrays, obtains the title and formatters from _get_date_axis_setup() (the explicit axis formatter/locator and MultipleLocator calls are commented out) and labels the time axis "Time (UT)". r2704 selects DayLocator/HourLocator/MinuteLocator/SecondLocator inline, applies them to the x axis, uses MultipleLocator on the y axes and labels the axis "Time (UT [day])" or "Time (UT [hour])".
- Telescope-pointing plot: r3112 masks fully flagged rows, drops a debug print of the plotted selection, expands the plot range when the R.A. or Dec. extent is below about 1.e-3 deg so that single pointings remain visible, and documents the first argument as a scantable instance (scan) rather than a filename (infile). r2704 only reverses the x axis.
- plottp(): r3112 adds a colorby argument ('type'|'scan'|'if'|'pol'|'beam'|'') and a private _plottp_in_time() method that plots time-averaged total power against time, one colour per source type/scan/IF/pol/beam, with date-axis formatting and a legend; _plottp() also masks fully flagged rows and raises a clearer ValueError when the selection mixes spws with different numbers of channels. r2704 has none of this.
- Header text: r3112 inserts extra newlines before the "***Selections***" block and escapes '$' characters in the selection string; r2704 appends the selection string directly.
- plotgrid(): in r3112, center is a direction string (e.g. 'J2000 19h30m00s -25d00m00s') and spacing a list of quantity strings (e.g. ['1arcmin', '1arcmin']); both default to values derived from the map region, rows/cols default to at least 1 via set_layout(), and the grid is defined through plothelper.set_grid(). In r2704, center and spacing are lists of floats in the units of the DIRECTION column, the method computes the grid centre and spacing itself (printing the pointing range along the way) and defines the grid with plothelper.set_gridval().
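The _get_date_axis_setup() helper introduced in r3112 is essentially a lookup from time span to matplotlib date formatter and locators. Below is a condensed, self-contained sketch of the same idea; the thresholds follow the diff above, but only three of the branches are reproduced and the function name and test data are ours:

    from datetime import datetime, timedelta
    from matplotlib.dates import (DateFormatter, DayLocator, HourLocator,
                                  MinuteLocator, date2num)

    def date_axis_setup(dates):
        # Pick a title, a formatter and major/minor locators from the span of 'dates'.
        t = [date2num(d) for d in dates]
        tdel = max(t) - min(t)                  # span in days
        title = dates[0].strftime('%Y/%m/%d')
        if tdel > 1.0:                          # more than one day
            title += " - " + dates[-1].strftime('%Y/%m/%d')
            return title, DateFormatter('%b%d'), DayLocator(), HourLocator(range(0, 23, 12))
        if tdel > 24. / 60.:                    # 9.6 h - 1 day
            return title, DateFormatter('%H:%M'), HourLocator(), MinuteLocator(range(0, 60, 30))
        return title, DateFormatter('%H:%M'), MinuteLocator(range(0, 60, 10)), MinuteLocator()

    start = datetime(2014, 1, 1, 0, 0, 0)
    dates = [start + timedelta(minutes=30 * i) for i in range(30)]   # ~14.5 hours of data
    print(date_axis_setup(dates)[0])   # -> 2014/01/01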
trunk/python/customgui_base.py (r3112 vs r2704)
- r3112 imports weakref and, in both CustomToolbarCommon and CustomFlagToolbarCommon, stores the parent plotter as a weak reference,
      self.plotter = weakref.ref(parent)
  together with a _get_plotter() helper that dereferences it and raises RuntimeError, "Internal Error. The plotter has been destroyed." when the referent is gone. Every access that r2704 makes through self.plotter directly (spectrum picking, x-span region selection, page drawing in _new_page/_plot_page, the previous-page counters, page-number calculation, panel selection, the flag/unflag operations and the statistics helpers) goes through theplotter = self._get_plotter() in r3112.
- r2704 keeps a hard reference in both classes (self.plotter = parent) and uses it directly throughout.
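The weak-reference pattern adopted in r3112 can be demonstrated without any GUI code: the toolbar stores weakref.ref(parent) instead of a hard reference and every access goes through a small accessor that fails once the parent has been garbage collected. A minimal sketch of that pattern (Plotter and Toolbar are toy classes, not the ASAP ones):

    import weakref

    class Plotter(object):
        # Toy stand-in for the plotter the toolbar belongs to.
        def plot(self):
            return "plotted"

    class Toolbar(object):
        def __init__(self, parent):
            # store only a weak reference so the toolbar does not keep the plotter alive
            self.plotter = weakref.ref(parent)

        def _get_plotter(self):
            # dereference and fail loudly if the plotter has already been destroyed
            if self.plotter() is None:
                raise RuntimeError("Internal Error. The plotter has been destroyed.")
            return self.plotter()

    p = Plotter()
    tb = Toolbar(p)
    print(tb._get_plotter().plot())   # "plotted" while the plotter is alive
    del p                             # drop the last hard reference
    try:
        tb._get_plotter()
    except RuntimeError as e:
        print(e)                      # the accessor now reports the dead reference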
trunk/python/env.py (r3112 vs r2704)
- __all__: r3112 additionally exports get_asap_revdate.
- r3112 factors the lookup of the svninfo.txt path out of get_revision() into a helper get_revinfo_file() ("Only useful within casapy"; it resolves the path from CASAPATH and the Python version for both the developer and the installed layouts) and has get_revision() call it. r2704 keeps all of that logic inside get_revision().
- r3112 adds get_asap_revdate(), which skips the first two lines of svninfo.txt, reads the revision date from the third line and returns it stripped, or 'unknown' if the file is missing. r2704 has no such function.
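The split between get_revinfo_file(), get_revision() and get_asap_revdate() assumes that svninfo.txt keeps the revision number and the revision date on separate lines; only the date-on-the-third-line convention is visible in the diff, so the rest of the layout below is invented for illustration:

    import os

    # Invented file layout: URL on line 1, revision on line 2, date on line 3.
    SAMPLE = "https://svn.example.org/asap/trunk\n2810\n2013-05-01 12:34:56 +0000\n"

    def read_revision_info(path):
        # Return (revision, revdate) from an svninfo.txt-like file, or 'unknown' values.
        if not os.path.isfile(path):
            return 'unknown', 'unknown'
        with open(path) as f:
            f.readline()                      # skip the first line
            revision = f.readline().rstrip()  # assumed: revision number on line 2
            revdate = f.readline().strip()    # as in get_asap_revdate(): date on line 3
        return revision, revdate

    with open('svninfo_demo.txt', 'w') as f:
        f.write(SAMPLE)
    print(read_revision_info('svninfo_demo.txt'))
    os.remove('svninfo_demo.txt')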
trunk/python/sbseparator.py (r3112 vs r2704)
- r3112 imports SBSeparator from asap._asap and turns the sbseparator class into a thin wrapper around that implementation: __init__ creates self._separator = SBSeparator(infiles) and the methods forward to it.
      set_frequency(baseif, freqtol, frame=""): accepts the tolerance as a string; numbers and {'value': ..., 'unit': ...} dicts are converted to a string before calling self._separator.set_freq(baseif, freqtol, frame).
      set_dirtol(dirtol=["2arcsec", "2arcsec"]): takes quantity strings and forwards them to self._separator.set_dirtol.
      set_shift(imageshift=[]): if no image-band shifts are given, they are assumed equal and opposite to the signal-band shifts, as usual for LO1 offsetting of DSB receivers.
      set_both(), set_limit(), set_solve_other(): forward to solve_both, set_limit and subtract_other respectively.
      set_lo1(lo1, frame="TOPO", reftime=-1, refdir="") and set_lo1root(name): new methods that supply the LO1 frequency (directly, or from the ASDM_SPECTRALWINDOW / ASDM_RECEIVER tables of an MS) so that the frequency of the image sideband can be calculated.
      separate(outname="", overwrite=False): fills in the default output name 'sbseparated.asap', removes an existing output only when overwrite=True, and calls self._separator.separate(outname).
  The class docstring in r2704 additionally notes that flag information (FLAGTRA) is ignored.
- r2704 implements the separation in Python inside the class: set_data()/_reset_data() manage a list of input scantables or file names; set_frequency() stores a numeric tolerance or a {'value', 'unit': 'Hz'} dict; set_dirtol() takes the direction tolerance in radians (default 1e-5, about 2 arcsec); set_shift(mode="DSB", imageshift=None) switches between DSB mode and manually supplied image-band shifts; set_limit() stores a rejection limit (default 0.2). separate() grids the first table with asapgrid2 to define the output rows, collects the spectra per polarisation/beam/direction (_get_specarray), solves the signal and optionally the image sideband by FFT deconvolution (_shiftSpectrum, _Deconvolution, _combineResult, _subtractOtherSide), flags the rows that could not be solved and saves the results as <outname>.signalband and <outname>.imageband, warning that the image-band frequency information is not yet properly filled. A _setup_shift() helper validates the inputs, rejects multi-beam data and falls back to the first IF of the first scantable when no reference IFNO was given.
Reference IFNO = %d" % (self.baseif)) 454 455 if not (self.baseif in valid_ifs): 456 asaplog.post() 457 errmsg = "IF%d does not exist in the first scantable" % \ 458 self.baseif 459 raise RuntimeError, errmsg 460 461 asaplog.push("Start selecting tables and IFNOs to solve.") 462 asaplog.push("Cheching frequency of the reference IF") 463 unit_org = stab.get_unit() 464 coord = stab._getcoordinfo() 465 frame_org = coord[1] 466 stab.set_unit("Hz") 467 if len(self.freqframe) > 0: 468 stab.set_freqframe(self.freqframe) 469 stab.set_selection(ifs=[self.baseif]) 470 spx = stab._getabcissa() 471 stab.set_selection() 472 basech0 = spx[0] 473 baseinc = spx[1]-spx[0] 474 self.nchan = len(spx) 475 if isinstance(self.freqtol, float): 476 vftol = abs(baseinc * self.freqtol) 477 self.freqtol = dict(value=vftol, unit="Hz") 478 else: 479 vftol = abs(self.freqtol['value']) 480 inctol = abs(baseinc/float(self.nchan)) 481 asaplog.push("Reference frequency setup (Table = 0, IFNO = %d): nchan = %d, chan0 = %f Hz, incr = %f Hz" % (self.baseif, self.nchan, basech0, baseinc)) 482 asaplog.push("Allowed frequency tolerance = %f Hz ( %f channels)" % (vftol, vftol/baseinc)) 483 poltype0 = stab.poltype() 484 485 self.tables = [] 486 self.signalShift = [] 487 if self.dsbmode: 488 self.imageShift = [] 489 490 for itab in range(ntab): 491 asaplog.push("Table %d:" % itab) 492 tab_selected = False 493 if itab > 0: 494 if byname: 495 stab = scantable(self.intables[itab],average=False) 496 else: 497 stab = self.intables[itab] 498 unit_org = stab.get_unit() 499 coord = stab._getcoordinfo() 500 frame_org = coord[1] 501 stab.set_unit("Hz") 502 if len(self.freqframe) > 0: 503 stab.set_freqframe(self.freqframe) 504 505 # Check POLTYPE should be equal to the first table. 506 if stab.poltype() != poltype0: 507 asaplog.post() 508 raise Exception, "POLTYPE should be equal to the first table." 509 # Multiple beam data may not handled properly 510 if len(stab.getbeamnos()) > 1: 511 asaplog.post() 512 asaplog.push("table contains multiple beams. It may not be handled properly.") 513 asaplog.push("WARN") 514 515 for ifno in stab.getifnos(): 516 stab.set_selection(ifs=[ifno]) 517 spx = stab._getabcissa() 518 if (len(spx) != self.nchan) or \ 519 (abs(spx[0]-basech0) > vftol) or \ 520 (abs(spx[1]-spx[0]-baseinc) > inctol): 521 continue 522 tab_selected = True 523 seltab = stab.copy() 524 seltab.set_unit(unit_org) 525 seltab.set_freqframe(frame_org) 526 self.tables.append(seltab) 527 self.signalShift.append((spx[0]-basech0)/baseinc) 528 if self.dsbmode: 529 self.imageShift.append(-self.signalShift[-1]) 530 asaplog.push("- IF%d selected: sideband shift = %16.12e channels" % (ifno, self.signalShift[-1])) 531 stab.set_selection() 532 stab.set_unit(unit_org) 533 stab.set_freqframe(frame_org) 534 if not tab_selected: 535 asaplog.post() 536 asaplog.push("No data selected in Table %d" % itab) 537 asaplog.post("WARN") 538 539 asaplog.push("Total number of IFs selected = %d" % len(self.tables)) 540 if len(self.tables) < 2: 541 asaplog.post() 542 raise RuntimeError, "At least 2 IFs are necessary for convolution!" 
543 544 if not self.dsbmode and len(self.imageShift) != len(self.signalShift): 545 asaplog.post() 546 errmsg = "User defined channel shift of image sideband has %d elements, while selected IFNOs are %d" % (len(self.imageShift), len(self.signalShift)) 547 raise RuntimeError, errmsg 548 549 self.signalShift = numpy.array(self.signalShift) 550 self.imageShift = numpy.array(self.imageShift) 551 self.nshift = len(self.tables) 552 553 @asaplog_post_dec 554 def _preprocess_tables(self): 555 ### temporary method to preprocess data 556 ### Do time averaging for now. 557 for itab in range(len(self.tables)): 558 self.tables[itab] = self.tables[itab].average_time(scanav=False, weight="tintsys") 559 560 561 # def save(self, outfile, outform="ASAP", overwrite=False): 562 # if not overwrite and os.path.exists(outfile): 563 # raise RuntimeError, "Output file '%s' already exists" % outfile 564 # 565 # #self.separator._save(outfile, outform) 566 567 # def done(self): 568 # self.close() 569 570 # def close(self): 571 # pass 572 # #del self.separator 573 574 575 576 ######################################################################## 577 def _Deconvolution(self, data_array, shift, threshold=0.00000001): 578 FObs = [] 579 Reject = 0 580 nshift, nchan = data_array.shape 581 nspec = nshift*(nshift-1)/2 582 ifftObs = numpy.zeros((nspec, nchan), numpy.float) 583 for i in range(nshift): 584 F = FFT.fft(data_array[i]) 585 FObs.append(F) 586 z = 0 587 for i in range(nshift): 588 for j in range(i+1, nshift): 589 Fobs = (FObs[i]+FObs[j])/2.0 590 dX = (shift[j]-shift[i])*2.0*math.pi/float(self.nchan) 591 #print 'dX,i,j=',dX,i,j 592 for k in range(1,self.nchan): 593 if math.fabs(math.sin(dX*k)) > threshold: 594 Fobs[k] += ((FObs[i][k]-FObs[j][k])/2.0/(1.0-math.cos(dX*k))*math.sin(dX*k))*1.0j 595 else: Reject += 1 596 ifftObs[z] = FFT.ifft(Fobs) 597 z += 1 598 print 'Threshold=%s Reject=%d' % (threshold, Reject) 599 return ifftObs 600 601 def _combineResult(self, ifftObs): 602 nspec = len(ifftObs) 603 sum = ifftObs[0] 604 for i in range(1,nspec): 605 sum += ifftObs[i] 606 return(sum/float(nspec)) 607 608 def _subtractOtherSide(self, data_array, shift, Data): 609 sum = numpy.zeros(len(Data), numpy.float) 610 numSP = len(data_array) 611 for i in range(numSP): 612 SPsub = data_array[i] - Data 613 sum += self._shiftSpectrum(SPsub, -shift[i]) 614 return(sum/float(numSP)) 615 616 def _shiftSpectrum(self, data, Shift): 617 Out = numpy.zeros(self.nchan, numpy.float) 618 w2 = Shift % 1 619 w1 = 1.0 - w2 620 for i in range(self.nchan): 621 c1 = int((Shift + i) % self.nchan) 622 c2 = (c1 + 1) % self.nchan 623 Out[c1] += data[i] * w1 624 Out[c2] += data[i] * w2 625 return Out.copy() -
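The _shiftSpectrum method above performs a cyclic shift by a non-integer number of channels: every input channel is split between the two nearest output channels, weighted by the fractional part of the shift. Below is a self-contained, runnable re-statement of that weighting outside the class, given as an illustrative sketch (numpy.float64 is used in place of the deprecated numpy.float):

    import numpy

    def shift_spectrum(data, shift):
        # Cyclic fractional-channel shift following the logic of the
        # _shiftSpectrum method: channel i is distributed over output
        # channels int(i+shift) and int(i+shift)+1 with weights (1-w2)
        # and w2, where w2 is the fractional part of the shift.
        nchan = len(data)
        out = numpy.zeros(nchan, numpy.float64)
        w2 = shift % 1
        w1 = 1.0 - w2
        for i in range(nchan):
            c1 = int((shift + i) % nchan)
            c2 = (c1 + 1) % nchan
            out[c1] += data[i] * w1
            out[c2] += data[i] * w2
        return out

    spec = numpy.array([0., 0., 1., 0., 0.])
    print(shift_spectrum(spec, 1.0))   # [0. 0. 0. 1. 0.]      whole-channel shift
    print(shift_spectrum(spec, 0.5))   # [0. 0. 0.5 0.5 0.]    split between neighbours

This is the same per-IF shift that _solve_signal and _solve_image apply to each spectrum (via tempshift) before the Fourier-domain deconvolution combines the shifted spectra.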
trunk/python/scantable.py
r3112 r2704 2 2 3 3 import os 4 import re5 4 import tempfile 6 5 import numpy … … 46 45 l=f.readline() 47 46 f.close() 48 match_pattern = '^Type = (Scantable)? *$' 49 if re.match(match_pattern,l): 47 if ( l.find('Scantable') != -1 ): 48 return True 49 elif ( l.find('Measurement Set') == -1 and 50 l.find('Image') == -1 ): 50 51 return True 51 52 else: … … 174 175 return str(showprogress).lower() + ',' + str(minnrow) 175 176 176 def pack_blinfo(blinfo, maxirow):177 """\178 convert a dictionary or a list of dictionaries of baseline info179 into a list of comma-separated strings.180 """181 if isinstance(blinfo, dict):182 res = do_pack_blinfo(blinfo, maxirow)183 return [res] if res != '' else []184 elif isinstance(blinfo, list) or isinstance(blinfo, tuple):185 res = []186 for i in xrange(len(blinfo)):187 resi = do_pack_blinfo(blinfo[i], maxirow)188 if resi != '':189 res.append(resi)190 return res191 192 def do_pack_blinfo(blinfo, maxirow):193 """\194 convert a dictionary of baseline info for a spectrum into195 a comma-separated string.196 """197 dinfo = {}198 for key in ['row', 'blfunc', 'masklist']:199 if blinfo.has_key(key):200 val = blinfo[key]201 if key == 'row':202 irow = val203 if isinstance(val, list) or isinstance(val, tuple):204 slval = []205 for i in xrange(len(val)):206 if isinstance(val[i], list) or isinstance(val[i], tuple):207 for j in xrange(len(val[i])):208 slval.append(str(val[i][j]))209 else:210 slval.append(str(val[i]))211 sval = ",".join(slval)212 else:213 sval = str(val)214 215 dinfo[key] = sval216 else:217 raise ValueError("'"+key+"' is missing in blinfo.")218 219 if irow >= maxirow: return ''220 221 for key in ['order', 'npiece', 'nwave']:222 if blinfo.has_key(key):223 val = blinfo[key]224 if isinstance(val, list) or isinstance(val, tuple):225 slval = []226 for i in xrange(len(val)):227 slval.append(str(val[i]))228 sval = ",".join(slval)229 else:230 sval = str(val)231 dinfo[key] = sval232 233 fspec_keys = {'poly': 'order', 'chebyshev': 'order', 'cspline': 'npiece', 'sinusoid': 'nwave'}234 235 fspec_key = fspec_keys[dinfo['blfunc']]236 if not blinfo.has_key(fspec_key):237 raise ValueError("'"+fspec_key+"' is missing in blinfo.")238 239 clip_params_n = 0240 for key in ['clipthresh', 'clipniter']:241 if blinfo.has_key(key):242 clip_params_n += 1243 dinfo[key] = str(blinfo[key])244 245 if clip_params_n == 0:246 dinfo['clipthresh'] = '0.0'247 dinfo['clipniter'] = '0'248 elif clip_params_n != 2:249 raise ValueError("both 'clipthresh' and 'clipniter' must be given for n-sigma clipping.")250 251 lf_params_n = 0252 for key in ['thresh', 'edge', 'chan_avg_limit']:253 if blinfo.has_key(key):254 lf_params_n += 1255 val = blinfo[key]256 if isinstance(val, list) or isinstance(val, tuple):257 slval = []258 for i in xrange(len(val)):259 slval.append(str(val[i]))260 sval = ",".join(slval)261 else:262 sval = str(val)263 dinfo[key] = sval264 265 if lf_params_n == 3:266 dinfo['use_linefinder'] = 'true'267 elif lf_params_n == 0:268 dinfo['use_linefinder'] = 'false'269 dinfo['thresh'] = ''270 dinfo['edge'] = ''271 dinfo['chan_avg_limit'] = ''272 else:273 raise ValueError("all of 'thresh', 'edge' and 'chan_avg_limit' must be given to use linefinder.")274 275 slblinfo = [dinfo['row'], dinfo['blfunc'], dinfo[fspec_key], dinfo['masklist'], \276 dinfo['clipthresh'], dinfo['clipniter'], \277 dinfo['use_linefinder'], dinfo['thresh'], dinfo['edge'], dinfo['chan_avg_limit']]278 279 return ":".join(slblinfo)280 281 def parse_fitresult(sres):282 """\283 Parse the returned value of apply_bltable() or 
sub_baseline() and284 extract row number, the best-fit coefficients and rms, then pack285 them into a dictionary.286 The input value is generated by Scantable::packFittingResults() and287 formatted as 'row:coeff[0],coeff[1],..,coeff[n-1]:rms'.288 """289 res = []290 for i in xrange(len(sres)):291 (srow, scoeff, srms) = sres[i].split(":")292 row = int(srow)293 rms = float(srms)294 lscoeff = scoeff.split(",")295 coeff = []296 for j in xrange(len(lscoeff)):297 coeff.append(float(lscoeff[j]))298 res.append({'row': row, 'coeff': coeff, 'rms': rms})299 300 return res301 302 def is_number(s):303 s = s.strip()304 res = True305 try:306 a = float(s)307 res = True308 except:309 res = False310 finally:311 return res312 313 def is_frequency(s):314 s = s.strip()315 return (s[-2:].lower() == "hz")316 317 def get_freq_by_string(s1, s2):318 if not (is_number(s1) and is_frequency(s2)):319 raise RuntimeError("Invalid input string.")320 321 prefix_list = ["a", "f", "p", "n", "u", "m", ".", "k", "M", "G", "T", "P", "E"]322 factor_list = [1e-18, 1e-15, 1e-12, 1e-9, 1e-6, 1e-3, 1.0, 1e+3, 1e+6, 1e+9, 1e+12, 1e+15, 1e+18]323 324 s1 = s1.strip()325 s2 = s2.strip()326 327 prefix = s2[-3:-2]328 if is_number(prefix):329 res1 = float(s1)330 res2 = float(s2[:-2])331 else:332 factor = factor_list[prefix_list.index(prefix)]333 res1 = float(s1) * factor334 res2 = float(s2[:-3]) * factor335 336 return (res1, res2)337 338 def is_velocity(s):339 s = s.strip()340 return (s[-3:].lower() == "m/s")341 342 def get_velocity_by_string(s1, s2):343 if not (is_number(s1) and is_velocity(s2)):344 raise RuntimeError("Invalid input string.")345 346 # note that the default velocity unit is km/s347 prefix_list = [".", "k"]348 factor_list = [1e-3, 1.0]349 350 s1 = s1.strip()351 s2 = s2.strip()352 353 prefix = s2[-4:-3]354 if is_number(prefix): # in case velocity unit m/s355 res1 = float(s1) * 1e-3356 res2 = float(s2[:-3]) * 1e-3357 else:358 factor = factor_list[prefix_list.index(prefix)]359 res1 = float(s1) * factor360 res2 = float(s2[:-4]) * factor361 362 return (res1, res2)363 364 def get_frequency_by_velocity(restfreq, vel, doppler):365 # vel is in unit of km/s366 367 # speed of light368 vel_c = 299792.458369 370 import math371 r = vel / vel_c372 373 if doppler.lower() == 'radio':374 return restfreq * (1.0 - r)375 if doppler.lower() == 'optical':376 return restfreq / (1.0 + r)377 else:378 return restfreq * math.sqrt((1.0 - r) / (1.0 + r))379 380 def get_restfreq_in_Hz(s_restfreq):381 value = 0.0382 unit = ""383 s = s_restfreq.replace(" ","")384 385 for i in range(len(s))[::-1]:386 if s[i].isalpha():387 unit = s[i] + unit388 else:389 value = float(s[0:i+1])390 break391 392 if (unit == "") or (unit.lower() == "hz"):393 return value394 elif (len(unit) == 3) and (unit[1:3].lower() == "hz"):395 unitprefix = unit[0]396 factor = 1.0397 398 prefix_list = ["a", "f", "p", "n", "u", "m", ".", "k", "M", "G", "T", "P", "E"]399 factor_list = [1e-18, 1e-15, 1e-12, 1e-9, 1e-6, 1e-3, 1.0, 1e+3, 1e+6, 1e+9, 1e+12, 1e+15, 1e+18]400 factor = factor_list[prefix_list.index(unitprefix)]401 """402 if (unitprefix == 'a'):403 factor = 1.0e-18404 elif (unitprefix == 'f'):405 factor = 1.0e-15406 elif (unitprefix == 'p'):407 factor = 1.0e-12408 elif (unitprefix == 'n'):409 factor = 1.0e-9410 elif (unitprefix == 'u'):411 factor = 1.0e-6412 elif (unitprefix == 'm'):413 factor = 1.0e-3414 elif (unitprefix == 'k'):415 factor = 1.0e+3416 elif (unitprefix == 'M'):417 factor = 1.0e+6418 elif (unitprefix == 'G'):419 factor = 1.0e+9420 elif (unitprefix == 'T'):421 factor = 
1.0e+12422 elif (unitprefix == 'P'):423 factor = 1.0e+15424 elif (unitprefix == 'E'):425 factor = 1.0e+18426 """427 return value*factor428 else:429 mesg = "wrong unit of restfreq."430 raise Exception, mesg431 432 def normalise_restfreq(in_restfreq):433 if isinstance(in_restfreq, float):434 return in_restfreq435 elif isinstance(in_restfreq, int) or isinstance(in_restfreq, long):436 return float(in_restfreq)437 elif isinstance(in_restfreq, str):438 return get_restfreq_in_Hz(in_restfreq)439 elif isinstance(in_restfreq, list) or isinstance(in_restfreq, numpy.ndarray):440 if isinstance(in_restfreq, numpy.ndarray):441 if len(in_restfreq.shape) > 1:442 mesg = "given in numpy.ndarray, in_restfreq must be 1-D."443 raise Exception, mesg444 445 res = []446 for i in xrange(len(in_restfreq)):447 elem = in_restfreq[i]448 if isinstance(elem, float):449 res.append(elem)450 elif isinstance(elem, int) or isinstance(elem, long):451 res.append(float(elem))452 elif isinstance(elem, str):453 res.append(get_restfreq_in_Hz(elem))454 elif isinstance(elem, dict):455 if isinstance(elem["value"], float):456 res.append(elem)457 elif isinstance(elem["value"], int):458 dictelem = {}459 dictelem["name"] = elem["name"]460 dictelem["value"] = float(elem["value"])461 res.append(dictelem)462 elif isinstance(elem["value"], str):463 dictelem = {}464 dictelem["name"] = elem["name"]465 dictelem["value"] = get_restfreq_in_Hz(elem["value"])466 res.append(dictelem)467 else:468 mesg = "restfreq elements must be float, int, or string."469 raise Exception, mesg470 return res471 else:472 mesg = "wrong type of restfreq given."473 raise Exception, mesg474 475 def set_restfreq(s, restfreq):476 rfset = (restfreq != '') and (restfreq != [])477 if rfset:478 s.set_restfreqs(normalise_restfreq(restfreq))479 480 177 class scantable(Scantable): 481 178 """\ … … 513 210 Default (false) is taken from rc file. 514 211 515 getpt: Whether to import direction from MS/POINTING516 table properly or not.517 This is effective only when filename is MS.518 The default (True) is to import direction519 from MS/POINTING.520 212 """ 521 213 if average is None: … … 552 244 self._fill([filename], unit, average, opts) 553 245 elif os.path.isfile(filename): 554 opts={'nro': {}} 555 nrokeys=['freqref'] 556 for key in nrokeys: 557 if key in args.keys(): 558 opts['nro'][key] = args[key] 559 self._fill([filename], unit, average, opts) 246 self._fill([filename], unit, average) 560 247 # only apply to new data not "copy constructor" 561 248 self.parallactify(parallactify) … … 891 578 892 579 @asaplog_post_dec 893 def stats(self, stat='stddev', mask=None, form='3.3f', row=None , skip_flaggedrow=False):580 def stats(self, stat='stddev', mask=None, form='3.3f', row=None): 894 581 """\ 895 582 Determine the specified statistic of the current beam/if/pol … … 909 596 row: row number of spectrum to process. 910 597 (default is None: for all rows) 911 912 skip_flaggedrow: if True, skip outputting text for flagged913 spectra. default is False.914 598 915 599 Example: … … 924 608 "number of channels. 
Please use setselection() " 925 609 "to select individual IFs") 610 rtnabc = False 611 if stat.lower().endswith('_abc'): rtnabc = True 926 612 getchan = False 927 613 if stat.lower().startswith('min') or stat.lower().startswith('max'): … … 929 615 getchan = True 930 616 statvals = [] 931 932 rtnabc = False 933 if stat.lower().endswith('_abc'): 934 rtnabc = True 935 else: 617 if not rtnabc: 936 618 if row == None: 937 619 statvals = self._math._stats(self, mask, stat) … … 959 641 statunit= '' 960 642 if getchan: 961 if self._is_all_chan_flagged(i): 962 if rtnabc: 963 statvals.append(None) 643 qx, qy = self.chan2data(rowno=i, chan=chan[i]) 644 if rtnabc: 645 statvals.append(qx['value']) 646 refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])' 647 statunit= '['+qx['unit']+']' 964 648 else: 965 qx, qy = self.chan2data(rowno=i, chan=chan[i]) 966 if rtnabc: 967 statvals.append(qx['value']) 968 refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])' 969 statunit= '['+qx['unit']+']' 970 else: 971 refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])' 972 973 if self._is_all_chan_flagged(i): 974 if not rtnabc: 975 statvals[i] = None 976 if skip_flaggedrow: 977 continue 649 refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])' 978 650 979 651 tm = self._gettime(i) … … 987 659 if len(rows) > 1: 988 660 # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n' 989 if statvals[i] is None: 990 out += ('= None(flagged)') + ' '+refstr+'\n' 991 else: 992 out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n' 661 out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n' 993 662 else: 994 663 # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n' 995 if statvals[0] is None: 996 out += ('= None(flagged)') + ' '+refstr+'\n' 997 else: 998 out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n' 664 out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n' 999 665 out += sep+"\n" 1000 666 … … 1017 683 asaplog.push(''.join(x), False) 1018 684 1019 if skip_flaggedrow:1020 nstatvals = len(statvals)1021 for i in reversed(xrange(nstatvals)):1022 if statvals[i] is None:1023 del statvals[i]1024 685 return statvals 1025 686 … … 1104 765 return self._get_column( self._gettsysspectrum, row ) 1105 766 1106 def set_tsys(self, values, row=-1):1107 """\1108 Set the Tsys value(s) of the given 'row' or the whole scantable1109 (selection).1110 1111 Parameters:1112 1113 values: a scalar or list (if Tsys is a vector) of Tsys value(s)1114 row: the row number to apply Tsys values to.1115 (default all rows)1116 1117 """1118 1119 if not hasattr(values, "__len__"):1120 values = [values]1121 self._settsys(values, row)1122 1123 767 def get_weather(self, row=-1): 1124 768 """\ 1125 Return the weather information .769 Return the weather informations. 
1126 770 1127 771 Parameters: … … 1134 778 1135 779 """ 1136 if row >= len(self): 1137 raise IndexError("row out of range") 780 1138 781 values = self._get_column(self._get_weather, row) 1139 782 if row > -1: … … 1145 788 out = [] 1146 789 for r in values: 790 1147 791 out.append({'temperature': r[0], 1148 792 'pressure': r[1], 'humidity' : r[2], … … 1361 1005 self._add_history("set_feedtype", vars()) 1362 1006 1363 @asaplog_post_dec1364 def get_doppler(self):1365 """\1366 Get the doppler.1367 """1368 return self._getcoordinfo()[2]1369 1370 1007 @asaplog_post_dec 1371 1008 def set_doppler(self, doppler='RADIO'): … … 1834 1471 1835 1472 @asaplog_post_dec 1836 def parse_spw_selection(self, selectstring, restfreq=None, frame=None, doppler=None):1837 """1838 Parse MS type spw/channel selection syntax.1839 1840 Parameters:1841 selectstring : A string expression of spw and channel selection.1842 Comma-separated expressions mean different spw -1843 channel combinations. Spws and channel selections1844 are partitioned by a colon ':'. In a single1845 selection expression, you can put multiple values1846 separated by semicolons ';'. Both for spw and1847 channel selection, allowed cases include single1848 value, blank('') or asterisk('*') to specify all1849 available values, two values connected with a1850 tilde ('~') to specify an inclusive range. Unit1851 strings for frequency or velocity can be added to1852 the tilde-connected values. For channel selection1853 expression, placing a '<' or a '>' is possible to1854 specify a semi-infinite interval as well.1855 1856 examples:1857 '' or '*' = all spws (all channels)1858 '<2,4~6,9' = Spws 0,1,4,5,6,9 (all channels)1859 '3:3~45;60' = channels 3 to 45 and 60 in spw 31860 '0~1:2~6,8' = channels 2 to 6 in spws 0,1, and1861 all channels in spw81862 '1.3~1.5GHz' = all spws whose central frequency1863 falls in frequency range between1864 1.3GHz and 1.5GHz.1865 '1.3~1.5GHz:1.3~1.5GHz' = channels which fall1866 between the specified1867 frequency range in spws1868 whose central frequency1869 falls in the specified1870 frequency range.1871 '1:-200~250km/s' = channels that fall between the1872 specified velocity range in1873 spw 1.1874 restfreq: the rest frequency.1875 examples: '115.2712GHz', 115271201800.01876 frame: an optional frame type, default 'LSRK'. Valid frames are:1877 'TOPO', 'LSRD', 'LSRK', 'BARY',1878 'GEO', 'GALACTO', 'LGROUP', 'CMB'1879 doppler: one of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'1880 Returns:1881 A dictionary of selected (valid) spw and masklist pairs,1882 e.g. 
{'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}1883 """1884 if not isinstance(selectstring, str):1885 asaplog.post()1886 asaplog.push("Expression of spw/channel selection must be a string.")1887 asaplog.post("ERROR")1888 1889 orig_unit = self.get_unit()1890 self.set_unit('channel')1891 1892 if restfreq is not None:1893 orig_molids = self._getmolidcol_list()1894 set_restfreq(self, restfreq)1895 1896 orig_coord = self._getcoordinfo()1897 1898 if frame is not None:1899 orig_frame = orig_coord[1]1900 self.set_freqframe(frame)1901 1902 if doppler is not None:1903 orig_doppler = orig_coord[2]1904 self.set_doppler(doppler)1905 1906 valid_ifs = self.getifnos()1907 1908 comma_sep = selectstring.split(",")1909 res = {}1910 1911 for cms_elem in comma_sep:1912 colon_sep = cms_elem.split(":")1913 1914 if (len(colon_sep) > 2):1915 raise RuntimeError("Invalid selection expression: more than two colons!")1916 1917 # parse spw expression and store result in spw_list.1918 # allowed cases include '', '*', 'a', '<a', '>a', 'a~b',1919 # 'a~b*Hz' (where * can be '', 'k', 'M', 'G' etc.),1920 # 'a~b*m/s' (where * can be '' or 'k') and also1921 # several of the above expressions connected with ';'.1922 1923 spw_list = []1924 1925 semicolon_sep = colon_sep[0].split(";")1926 1927 for scs_elem in semicolon_sep:1928 scs_elem = scs_elem.strip()1929 1930 lt_sep = scs_elem.split("<")1931 gt_sep = scs_elem.split(">")1932 ti_sep = scs_elem.split("~")1933 1934 lt_sep_length = len(lt_sep)1935 gt_sep_length = len(gt_sep)1936 ti_sep_length = len(ti_sep)1937 1938 len_product = lt_sep_length * gt_sep_length * ti_sep_length1939 1940 if (len_product > 2):1941 # '<', '>' and '~' must not coexist in a single spw expression1942 1943 raise RuntimeError("Invalid spw selection.")1944 1945 elif (len_product == 1):1946 # '', '*', or single spw number.1947 1948 if (scs_elem == "") or (scs_elem == "*"):1949 spw_list = valid_ifs[:] # deep copy1950 1951 else: # single number1952 expr = int(scs_elem)1953 spw_list.append(expr)1954 if expr not in valid_ifs:1955 asaplog.push("Invalid spw given. Ignored.")1956 1957 else: # (len_product == 2)1958 # namely, one of '<', '>' or '~' appears just once.1959 1960 if (lt_sep_length == 2): # '<a'1961 if is_number(lt_sep[1]):1962 no_valid_spw = True1963 for i in valid_ifs:1964 if (i < float(lt_sep[1])):1965 spw_list.append(i)1966 no_valid_spw = False1967 1968 if no_valid_spw:1969 raise ValueError("Invalid spw selection ('<" + str(lt_sep[1]) + "').")1970 1971 else:1972 raise RuntimeError("Invalid spw selection.")1973 1974 elif (gt_sep_length == 2): # '>a'1975 if is_number(gt_sep[1]):1976 no_valid_spw = True1977 for i in valid_ifs:1978 if (i > float(gt_sep[1])):1979 spw_list.append(i)1980 no_valid_spw = False1981 1982 if no_valid_spw:1983 raise ValueError("Invalid spw selection ('>" + str(gt_sep[1]) + "').")1984 1985 else:1986 raise RuntimeError("Invalid spw selection.")1987 1988 else: # (ti_sep_length == 2) where both boundaries inclusive1989 expr0 = ti_sep[0].strip()1990 expr1 = ti_sep[1].strip()1991 1992 if is_number(expr0) and is_number(expr1):1993 # 'a~b'1994 expr_pmin = min(float(expr0), float(expr1))1995 expr_pmax = max(float(expr0), float(expr1))1996 has_invalid_spw = False1997 no_valid_spw = True1998 1999 for i in valid_ifs:2000 if (expr_pmin <= i) and (i <= expr_pmax):2001 spw_list.append(i)2002 no_valid_spw = False2003 else:2004 has_invalid_spw = True2005 2006 if has_invalid_spw:2007 msg = "Invalid spw is given. 
Ignored."2008 asaplog.push(msg)2009 asaplog.post()2010 2011 if no_valid_spw:2012 raise ValueError("No valid spw in range ('" + str(expr_pmin) + "~" + str(expr_pmax) + "').")2013 2014 elif is_number(expr0) and is_frequency(expr1):2015 # 'a~b*Hz'2016 (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)2017 expr_fmin = min(expr_f0, expr_f1)2018 expr_fmax = max(expr_f0, expr_f1)2019 no_valid_spw = True2020 2021 for coord in self._get_coordinate_list():2022 spw = coord['if']2023 2024 """2025 expr_p0 = coord['coord'].to_pixel(expr_f0)2026 expr_p1 = coord['coord'].to_pixel(expr_f1)2027 expr_pmin = min(expr_p0, expr_p1)2028 expr_pmax = max(expr_p0, expr_p1)2029 2030 pmin = 0.02031 pmax = float(self.nchan(spw) - 1)2032 2033 if ((expr_pmax - pmin)*(expr_pmin - pmax) <= 0.0):2034 spw_list.append(spw)2035 no_valid_spw = False2036 """2037 2038 crd = coord['coord']2039 fhead = crd.to_frequency(0)2040 ftail = crd.to_frequency(self.nchan(spw) - 1)2041 fcen = (fhead + ftail) / 2.02042 2043 if ((expr_fmin <= fcen) and (fcen <= expr_fmax)):2044 spw_list.append(spw)2045 no_valid_spw = False2046 2047 if no_valid_spw:2048 raise ValueError("No valid spw in range ('" + str(expr0) + "~" + str(expr1) + "').")2049 2050 elif is_number(expr0) and is_velocity(expr1):2051 # 'a~b*m/s'2052 (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)2053 expr_vmin = min(expr_v0, expr_v1)2054 expr_vmax = max(expr_v0, expr_v1)2055 no_valid_spw = True2056 2057 for coord in self._get_coordinate_list():2058 spw = coord['if']2059 2060 """2061 pmin = 0.02062 pmax = float(self.nchan(spw) - 1)2063 2064 vel0 = coord['coord'].to_velocity(pmin)2065 vel1 = coord['coord'].to_velocity(pmax)2066 2067 vmin = min(vel0, vel1)2068 vmax = max(vel0, vel1)2069 2070 if ((expr_vmax - vmin)*(expr_vmin - vmax) <= 0.0):2071 spw_list.append(spw)2072 no_valid_spw = False2073 """2074 2075 crd = coord['coord']2076 fhead = crd.to_frequency(0)2077 ftail = crd.to_frequency(self.nchan(spw) - 1)2078 fcen = (fhead + ftail) / 2.02079 vcen = crd.to_velocity(crd.to_pixel(fcen))2080 2081 if ((expr_vmin <= vcen) and (vcen <= expr_vmax)):2082 spw_list.append(spw)2083 no_valid_spw = False2084 2085 if no_valid_spw:2086 raise ValueError("No valid spw in range ('" + str(expr0) + "~" + str(expr1) + "').")2087 2088 else:2089 # cases such as 'aGHz~bkm/s' are not allowed now2090 raise RuntimeError("Invalid spw selection.")2091 2092 # check spw list and remove invalid ones.2093 # if no valid spw left, emit ValueError.2094 if len(spw_list) == 0:2095 raise ValueError("No valid spw in given range.")2096 2097 # parse channel expression and store the result in crange_list.2098 # allowed cases include '', 'a~b', 'a*Hz~b*Hz' (where * can be2099 # '', 'k', 'M', 'G' etc.), 'a*m/s~b*m/s' (where * can be '' or 'k')2100 # and also several of the above expressions connected with ';'.2101 2102 for spw in spw_list:2103 pmin = 0.02104 pmax = float(self.nchan(spw) - 1)2105 2106 molid = self._getmolidcol_list()[self.get_first_rowno_by_if(spw)]2107 2108 if (len(colon_sep) == 1):2109 # no expression for channel selection,2110 # which means all channels are to be selected.2111 crange_list = [[pmin, pmax]]2112 2113 else: # (len(colon_sep) == 2)2114 crange_list = []2115 2116 found = False2117 for i in self._get_coordinate_list():2118 if (i['if'] == spw):2119 coord = i['coord']2120 found = True2121 break2122 2123 if found:2124 semicolon_sep = colon_sep[1].split(";")2125 for scs_elem in semicolon_sep:2126 scs_elem = scs_elem.strip()2127 2128 ti_sep = scs_elem.split("~")2129 ti_sep_length = 
len(ti_sep)2130 2131 if (ti_sep_length > 2):2132 raise RuntimeError("Invalid channel selection.")2133 2134 elif (ti_sep_length == 1):2135 if (scs_elem == "") or (scs_elem == "*"):2136 # '' and '*' for all channels2137 crange_list = [[pmin, pmax]]2138 break2139 elif (is_number(scs_elem)):2140 # single channel given2141 crange_list.append([float(scs_elem), float(scs_elem)])2142 else:2143 raise RuntimeError("Invalid channel selection.")2144 2145 else: #(ti_sep_length == 2)2146 expr0 = ti_sep[0].strip()2147 expr1 = ti_sep[1].strip()2148 2149 if is_number(expr0) and is_number(expr1):2150 # 'a~b'2151 expr_pmin = min(float(expr0), float(expr1))2152 expr_pmax = max(float(expr0), float(expr1))2153 2154 elif is_number(expr0) and is_frequency(expr1):2155 # 'a~b*Hz'2156 (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)2157 expr_p0 = coord.to_pixel(expr_f0)2158 expr_p1 = coord.to_pixel(expr_f1)2159 expr_pmin = min(expr_p0, expr_p1)2160 expr_pmax = max(expr_p0, expr_p1)2161 2162 elif is_number(expr0) and is_velocity(expr1):2163 # 'a~b*m/s'2164 restf = self.get_restfreqs()[molid][0]2165 (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)2166 dppl = self.get_doppler()2167 expr_f0 = get_frequency_by_velocity(restf, expr_v0, dppl)2168 expr_f1 = get_frequency_by_velocity(restf, expr_v1, dppl)2169 expr_p0 = coord.to_pixel(expr_f0)2170 expr_p1 = coord.to_pixel(expr_f1)2171 expr_pmin = min(expr_p0, expr_p1)2172 expr_pmax = max(expr_p0, expr_p1)2173 2174 else:2175 # cases such as 'aGHz~bkm/s' are not allowed now2176 raise RuntimeError("Invalid channel selection.")2177 2178 cmin = max(pmin, expr_pmin)2179 cmax = min(pmax, expr_pmax)2180 # if the given range of channel selection has overwrap with2181 # that of current spw, output the overwrap area.2182 if (cmin <= cmax):2183 cmin = float(int(cmin + 0.5))2184 cmax = float(int(cmax + 0.5))2185 crange_list.append([cmin, cmax])2186 2187 if (len(crange_list) == 0):2188 crange_list.append([])2189 2190 if (len(crange_list[0]) > 0):2191 if res.has_key(spw):2192 res[spw].extend(crange_list)2193 else:2194 res[spw] = crange_list2195 2196 for spw in res.keys():2197 if spw in valid_ifs:2198 # remove duplicated channel ranges2199 for i in reversed(xrange(len(res[spw]))):2200 for j in xrange(i):2201 if ((res[spw][i][0]-res[spw][j][1])*(res[spw][i][1]-res[spw][j][0]) <= 0) or \2202 (min(abs(res[spw][i][0]-res[spw][j][1]),abs(res[spw][j][0]-res[spw][i][1])) == 1):2203 asaplog.post()2204 merge_warn_mesg = "Spw " + str(spw) + ": overwrapping channel ranges are merged."2205 asaplog.push(merge_warn_mesg)2206 asaplog.post('WARN')2207 res[spw][j][0] = min(res[spw][i][0], res[spw][j][0])2208 res[spw][j][1] = max(res[spw][i][1], res[spw][j][1])2209 res[spw].pop(i)2210 break2211 else:2212 del res[spw]2213 2214 if len(res) == 0:2215 raise RuntimeError("No valid spw.")2216 2217 # restore original values2218 self.set_unit(orig_unit)2219 if restfreq is not None:2220 self._setmolidcol_list(orig_molids)2221 if frame is not None:2222 self.set_freqframe(orig_frame)2223 if doppler is not None:2224 self.set_doppler(orig_doppler)2225 2226 return res2227 2228 @asaplog_post_dec2229 def get_first_rowno_by_if(self, ifno):2230 found = False2231 for irow in xrange(self.nrow()):2232 if (self.getif(irow) == ifno):2233 res = irow2234 found = True2235 break2236 2237 if not found: raise RuntimeError("No valid spw.")2238 2239 return res2240 2241 @asaplog_post_dec2242 def _get_coordinate_list(self):2243 res = []2244 spws = self.getifnos()2245 for spw in spws:2246 elem = {}2247 elem['if'] = spw2248 
elem['coord'] = self.get_coordinate(self.get_first_rowno_by_if(spw))2249 res.append(elem)2250 2251 return res2252 2253 @asaplog_post_dec2254 1473 def parse_maskexpr(self, maskstring): 2255 1474 """ … … 2282 1501 maskstring = str(valid_ifs)[1:-1] 2283 1502 ## split each selection "IF range[:CHAN range]" 2284 # split maskstring by "<spaces>,<spaces>" 2285 comma_sep = re.compile('\s*,\s*') 2286 sellist = comma_sep.split(maskstring) 2287 # separator by "<spaces>:<spaces>" 2288 collon_sep = re.compile('\s*:\s*') 1503 sellist = maskstring.split(',') 2289 1504 for currselstr in sellist: 2290 selset = c ollon_sep.split(currselstr)1505 selset = currselstr.split(':') 2291 1506 # spw and mask string (may include ~, < or >) 2292 1507 spwmasklist = self._parse_selection(selset[0], typestr='integer', … … 2422 1637 if smode == 'r': 2423 1638 minidx = 0 2424 maxidx = self.nrow() -11639 maxidx = self.nrow() 2425 1640 else: 2426 1641 idx = getattr(self,"get"+mode+"nos")() … … 2428 1643 maxidx = max(idx) 2429 1644 del idx 2430 # split selexpr by "<spaces>,<spaces>" 2431 comma_sep = re.compile('\s*,\s*') 2432 sellist = comma_sep.split(selexpr) 1645 sellist = selexpr.split(',') 2433 1646 idxlist = [] 2434 1647 for currselstr in sellist: … … 2438 1651 for thelist in currlist: 2439 1652 idxlist += range(thelist[0],thelist[1]+1) 2440 # remove duplicated elements after first ones2441 for i in reversed(xrange(len(idxlist))):2442 if idxlist.index(idxlist[i]) < i:2443 idxlist.pop(i)2444 2445 # remove elements outside range [minidx, maxidx] for smode='r'2446 if smode == 'r':2447 for i in reversed(xrange(len(idxlist))):2448 if (idxlist[i] < minidx) or (idxlist[i] > maxidx):2449 idxlist.pop(i)2450 2451 1653 msg = "Selected %s: %s" % (mode.upper()+"NO", str(idxlist)) 2452 1654 asaplog.push(msg) … … 2474 1676 --> returns [[0.,2.5],[5.0,7.0],[9.,9.]] 2475 1677 """ 2476 # split selstr by '<spaces>;<spaces>' 2477 semi_sep = re.compile('\s*;\s*') 2478 selgroups = semi_sep.split(selstr) 1678 selgroups = selstr.split(';') 2479 1679 sellists = [] 2480 1680 if typestr.lower().startswith('int'): … … 2485 1685 2486 1686 for currsel in selgroups: 2487 if currsel.strip() == '*' or len(currsel.strip()) == 0:2488 minsel = minval2489 maxsel = maxval2490 1687 if currsel.find('~') > 0: 2491 1688 # val0 <= x <= val1 2492 1689 minsel = formatfunc(currsel.split('~')[0].strip()) 2493 maxsel = formatfunc(currsel.split('~')[1].strip()) 1690 maxsel = formatfunc(currsel.split('~')[1].strip()) 2494 1691 elif currsel.strip().find('<=') > -1: 2495 1692 bound = currsel.split('<=') … … 2710 1907 2711 1908 @asaplog_post_dec 2712 def history(self, filename=None , nrows=-1, start=0):1909 def history(self, filename=None): 2713 1910 """\ 2714 1911 Print the history. Optionally to a file. … … 2719 1916 2720 1917 """ 2721 n = self._historylength() 2722 if nrows == -1: 2723 nrows = n 2724 if start+nrows > n: 2725 nrows = nrows-start 2726 if n > 1000 and nrows == n: 2727 nrows = 1000 2728 start = n-1000 2729 asaplog.push("Warning: History has {0} entries. 
Displaying last " 2730 "1000".format(n)) 2731 hist = list(self._gethistory(nrows, start)) 1918 hist = list(self._gethistory()) 2732 1919 out = "-"*80 2733 1920 for h in hist: 2734 if not h.strip(): 2735 continue 2736 if h.find("---") >-1: 2737 continue 1921 if h.startswith("---"): 1922 out = "\n".join([out, h]) 2738 1923 else: 2739 1924 items = h.split("##") … … 2748 1933 s = i.split("=") 2749 1934 out += "\n %s = %s" % (s[0], s[1]) 2750 out = "\n".join([out, " *"*80])1935 out = "\n".join([out, "-"*80]) 2751 1936 if filename is not None: 2752 1937 if filename is "": 2753 1938 filename = 'scantable_history.txt' 1939 import os 2754 1940 filename = os.path.expandvars(os.path.expanduser(filename)) 2755 1941 if not os.path.isdir(filename): … … 2766 1952 # 2767 1953 @asaplog_post_dec 2768 def average_time(self, mask=None, scanav=False, weight='tint', align=False, 2769 avmode="NONE"): 1954 def average_time(self, mask=None, scanav=False, weight='tint', align=False): 2770 1955 """\ 2771 1956 Return the (time) weighted average of a scan. Scans will be averaged … … 2795 1980 align: align the spectra in velocity before averaging. It takes 2796 1981 the time of the first spectrum as reference time. 2797 avmode: 'SOURCE' - also select by source name - or2798 'NONE' (default). Not applicable for scanav=True or2799 weight=median2800 1982 2801 1983 Example:: … … 2808 1990 weight = weight or 'TINT' 2809 1991 mask = mask or () 2810 scanav = (scanav and 'SCAN') or avmode.upper()1992 scanav = (scanav and 'SCAN') or 'NONE' 2811 1993 scan = (self, ) 2812 1994 2813 1995 if align: 2814 1996 scan = (self.freq_align(insitu=False), ) 2815 asaplog.push("Note: Alignment is don on a source-by-source basis")2816 asaplog.push("Note: Averaging (by default) is not")2817 # we need to set it to SOURCE averaging here2818 1997 s = None 2819 1998 if weight.upper() == 'MEDIAN': … … 3360 2539 raise RuntimeError(msg) 3361 2540 3362 @asaplog_post_dec 3363 def apply_bltable(self, insitu=None, retfitres=None, inbltable=None, outbltable=None, overwrite=None): 3364 """\ 3365 Subtract baseline based on parameters written in Baseline Table. 3366 3367 Parameters: 3368 insitu: if True, baseline fitting/subtraction is done 3369 in-situ. If False, a new scantable with 3370 baseline subtracted is returned. Actually, 3371 format of the returned value depends on both 3372 insitu and retfitres (see below). 3373 The default is taken from .asaprc (False) 3374 retfitres: if True, the results of baseline fitting (i.e., 3375 coefficients and rms) are returned. 3376 default is False. 3377 The format of the returned value of this 3378 function varies as follows: 3379 (1) in case insitu=True and retfitres=True: 3380 fitting result. 3381 (2) in case insitu=True and retfitres=False: 3382 None. 3383 (3) in case insitu=False and retfitres=True: 3384 a dictionary containing a new scantable 3385 (with baseline subtracted) and the fitting 3386 results. 3387 (4) in case insitu=False and retfitres=False: 3388 a new scantable (with baseline subtracted). 3389 inbltable: name of input baseline table. The row number of 3390 scantable and that of inbltable must be 3391 identical. 3392 outbltable: name of output baseline table where baseline 3393 parameters and fitting results recorded. 3394 default is ''(no output). 3395 overwrite: if True when an existing baseline table is 3396 specified for outbltable, overwrites it. 3397 Otherwise there is no harm. 3398 default is False. 
3399 """ 3400 3401 try: 3402 varlist = vars() 3403 if retfitres is None: retfitres = False 3404 if inbltable is None: raise ValueError("bltable missing.") 3405 if outbltable is None: outbltable = '' 3406 if overwrite is None: overwrite = False 3407 3408 if insitu is None: insitu = rcParams['insitu'] 3409 if insitu: 3410 workscan = self 3411 else: 3412 workscan = self.copy() 3413 3414 sres = workscan._apply_bltable(inbltable, 3415 retfitres, 3416 outbltable, 3417 os.path.exists(outbltable), 3418 overwrite) 3419 if retfitres: res = parse_fitresult(sres) 3420 3421 workscan._add_history('apply_bltable', varlist) 3422 3423 if insitu: 3424 self._assign(workscan) 3425 if retfitres: 3426 return res 3427 else: 3428 return None 3429 else: 3430 if retfitres: 3431 return {'scantable': workscan, 'fitresults': res} 3432 else: 3433 return workscan 3434 3435 except RuntimeError, e: 3436 raise_fitting_failure_exception(e) 3437 3438 @asaplog_post_dec 3439 def sub_baseline(self, insitu=None, retfitres=None, blinfo=None, bltable=None, overwrite=None): 3440 """\ 3441 Subtract baseline based on parameters written in the input list. 3442 3443 Parameters: 3444 insitu: if True, baseline fitting/subtraction is done 3445 in-situ. If False, a new scantable with 3446 baseline subtracted is returned. Actually, 3447 format of the returned value depends on both 3448 insitu and retfitres (see below). 3449 The default is taken from .asaprc (False) 3450 retfitres: if True, the results of baseline fitting (i.e., 3451 coefficients and rms) are returned. 3452 default is False. 3453 The format of the returned value of this 3454 function varies as follows: 3455 (1) in case insitu=True and retfitres=True: 3456 fitting result. 3457 (2) in case insitu=True and retfitres=False: 3458 None. 3459 (3) in case insitu=False and retfitres=True: 3460 a dictionary containing a new scantable 3461 (with baseline subtracted) and the fitting 3462 results. 3463 (4) in case insitu=False and retfitres=False: 3464 a new scantable (with baseline subtracted). 3465 blinfo: baseline parameter set stored in a dictionary 3466 or a list of dictionary. Each dictionary 3467 corresponds to each spectrum and must contain 3468 the following keys and values: 3469 'row': row number, 3470 'blfunc': function name. available ones include 3471 'poly', 'chebyshev', 'cspline' and 3472 'sinusoid', 3473 'order': maximum order of polynomial. needed 3474 if blfunc='poly' or 'chebyshev', 3475 'npiece': number or piecewise polynomial. 3476 needed if blfunc='cspline', 3477 'nwave': a list of sinusoidal wave numbers. 3478 needed if blfunc='sinusoid', and 3479 'masklist': min-max windows for channel mask. 3480 the specified ranges will be used 3481 for fitting. 3482 bltable: name of output baseline table where baseline 3483 parameters and fitting results recorded. 3484 default is ''(no output). 3485 overwrite: if True when an existing baseline table is 3486 specified for bltable, overwrites it. 3487 Otherwise there is no harm. 3488 default is False. 3489 3490 Example: 3491 sub_baseline(blinfo=[{'row':0, 'blfunc':'poly', 'order':5, 3492 'masklist':[[10,350],[352,510]]}, 3493 {'row':1, 'blfunc':'cspline', 'npiece':3, 3494 'masklist':[[3,16],[19,404],[407,511]]} 3495 ]) 3496 3497 the first spectrum (row=0) will be fitted with polynomial 3498 of order=5 and the next one (row=1) will be fitted with cubic 3499 spline consisting of 3 pieces. 
3500 """ 3501 3502 try: 3503 varlist = vars() 3504 if retfitres is None: retfitres = False 3505 if blinfo is None: blinfo = [] 3506 if bltable is None: bltable = '' 3507 if overwrite is None: overwrite = False 3508 3509 if insitu is None: insitu = rcParams['insitu'] 3510 if insitu: 3511 workscan = self 3512 else: 3513 workscan = self.copy() 3514 3515 nrow = workscan.nrow() 3516 3517 in_blinfo = pack_blinfo(blinfo=blinfo, maxirow=nrow) 3518 3519 sres = workscan._sub_baseline(in_blinfo, 3520 retfitres, 3521 bltable, 3522 os.path.exists(bltable), 3523 overwrite) 3524 if retfitres: res = parse_fitresult(sres) 3525 3526 workscan._add_history('sub_baseline', varlist) 3527 3528 if insitu: 3529 self._assign(workscan) 3530 if retfitres: 3531 return res 3532 else: 3533 return None 3534 else: 3535 if retfitres: 3536 return {'scantable': workscan, 'fitresults': res} 3537 else: 3538 return workscan 3539 3540 except RuntimeError, e: 3541 raise_fitting_failure_exception(e) 3542 3543 @asaplog_post_dec 3544 def calc_aic(self, value=None, blfunc=None, order=None, mask=None, 3545 whichrow=None, uselinefinder=None, edge=None, 3546 threshold=None, chan_avg_limit=None): 3547 """\ 3548 Calculates and returns model selection criteria for a specified 3549 baseline model and a given spectrum data. 3550 Available values include Akaike Information Criterion (AIC), the 3551 corrected Akaike Information Criterion (AICc) by Sugiura(1978), 3552 Bayesian Information Criterion (BIC) and the Generalised Cross 3553 Validation (GCV). 3554 3555 Parameters: 3556 value: name of model selection criteria to calculate. 3557 available ones include 'aic', 'aicc', 'bic' and 3558 'gcv'. default is 'aicc'. 3559 blfunc: baseline function name. available ones include 3560 'chebyshev', 'cspline' and 'sinusoid'. 3561 default is 'chebyshev'. 3562 order: parameter for basline function. actually stands for 3563 order of polynomial (order) for 'chebyshev', 3564 number of spline pieces (npiece) for 'cspline' and 3565 maximum wave number for 'sinusoid', respectively. 3566 default is 5 (which is also the default order value 3567 for [auto_]chebyshev_baseline()). 3568 mask: an optional mask. default is []. 3569 whichrow: row number. default is 0 (the first row) 3570 uselinefinder: use sd.linefinder() to flag out line regions 3571 default is True. 3572 edge: an optional number of channel to drop at 3573 the edge of spectrum. If only one value is 3574 specified, the same number will be dropped 3575 from both sides of the spectrum. Default 3576 is to keep all channels. Nested tuples 3577 represent individual edge selection for 3578 different IFs (a number of spectral channels 3579 can be different) 3580 default is (0, 0). 3581 threshold: the threshold used by line finder. It is 3582 better to keep it large as only strong lines 3583 affect the baseline solution. 3584 default is 3. 3585 chan_avg_limit: a maximum number of consequtive spectral 3586 channels to average during the search of 3587 weak and broad lines. The default is no 3588 averaging (and no search for weak lines). 3589 If such lines can affect the fitted baseline 3590 (e.g. a high order polynomial is fitted), 3591 increase this parameter (usually values up 3592 to 8 are reasonable). Most users of this 3593 method should find the default value sufficient. 3594 default is 1. 
3595 3596 Example: 3597 aic = scan.calc_aic(blfunc='chebyshev', order=5, whichrow=0) 3598 """ 3599 3600 try: 3601 varlist = vars() 3602 3603 if value is None: value = 'aicc' 3604 if blfunc is None: blfunc = 'chebyshev' 3605 if order is None: order = 5 3606 if mask is None: mask = [] 3607 if whichrow is None: whichrow = 0 3608 if uselinefinder is None: uselinefinder = True 3609 if edge is None: edge = (0, 0) 3610 if threshold is None: threshold = 3 3611 if chan_avg_limit is None: chan_avg_limit = 1 3612 3613 return self._calc_aic(value, blfunc, order, mask, 3614 whichrow, uselinefinder, edge, 3615 threshold, chan_avg_limit) 3616 3617 except RuntimeError, e: 3618 raise_fitting_failure_exception(e) 3619 3620 @asaplog_post_dec 3621 def sinusoid_baseline(self, mask=None, applyfft=None, 2541 2542 @asaplog_post_dec 2543 def sinusoid_baseline(self, insitu=None, mask=None, applyfft=None, 3622 2544 fftmethod=None, fftthresh=None, 3623 addwn=None, rejwn=None, 3624 insitu=None, 3625 clipthresh=None, clipniter=None, 3626 plot=None, 3627 getresidual=None, 3628 showprogress=None, minnrow=None, 3629 outlog=None, 3630 blfile=None, csvformat=None, 3631 bltable=None): 2545 addwn=None, rejwn=None, clipthresh=None, 2546 clipniter=None, plot=None, 2547 getresidual=None, showprogress=None, 2548 minnrow=None, outlog=None, blfile=None, csvformat=None): 3632 2549 """\ 3633 2550 Return a scan which has been baselined (all rows) with sinusoidal … … 3635 2552 3636 2553 Parameters: 2554 insitu: if False a new scantable is returned. 2555 Otherwise, the scaling is done in-situ 2556 The default is taken from .asaprc (False) 3637 2557 mask: an optional mask 3638 2558 applyfft: if True use some method, such as FFT, to find … … 3666 2586 wave numbers which are specified both in addwn 3667 2587 and rejwn will NOT be used. default is []. 3668 insitu: if False a new scantable is returned.3669 Otherwise, the scaling is done in-situ3670 The default is taken from .asaprc (False)3671 2588 clipthresh: Clipping threshold. (default is 3.0, unit: sigma) 3672 2589 clipniter: maximum number of iteration of 'clipthresh'-sigma … … 3688 2605 (default is '': no file/logger output) 3689 2606 csvformat: if True blfile is csv-formatted, default is False. 3690 bltable: name of a baseline table where fitting results3691 (coefficients, rms, etc.) are to be written.3692 if given, fitting results will NOT be output to3693 scantable (insitu=True) or None will be3694 returned (insitu=False).3695 (default is "": no table output)3696 2607 3697 2608 Example: … … 3715 2626 workscan = self.copy() 3716 2627 2628 #if mask is None: mask = [True for i in xrange(workscan.nchan())] 3717 2629 if mask is None: mask = [] 3718 2630 if applyfft is None: applyfft = True … … 3730 2642 if blfile is None: blfile = '' 3731 2643 if csvformat is None: csvformat = False 3732 if bltable is None: bltable = '' 3733 3734 sapplyfft = 'true' if applyfft else 'false' 3735 fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()]) 3736 3737 scsvformat = 'T' if csvformat else 'F' 2644 2645 if csvformat: 2646 scsvformat = "T" 2647 else: 2648 scsvformat = "F" 3738 2649 3739 2650 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method. 
3740 workscan._sinusoid_baseline(mask, 3741 fftinfo, 3742 #applyfft, fftmethod.lower(), 3743 #str(fftthresh).lower(), 2651 workscan._sinusoid_baseline(mask, applyfft, fftmethod.lower(), 2652 str(fftthresh).lower(), 3744 2653 workscan._parse_wn(addwn), 3745 2654 workscan._parse_wn(rejwn), … … 3748 2657 pack_progress_params(showprogress, 3749 2658 minnrow), 3750 outlog, scsvformat+blfile, 3751 bltable) 2659 outlog, scsvformat+blfile) 3752 2660 workscan._add_history('sinusoid_baseline', varlist) 3753 3754 if bltable == '': 3755 if insitu: 3756 self._assign(workscan) 3757 else: 3758 return workscan 3759 else: 3760 if not insitu: 3761 return None 2661 2662 if insitu: 2663 self._assign(workscan) 2664 else: 2665 return workscan 3762 2666 3763 2667 except RuntimeError, e: … … 3766 2670 3767 2671 @asaplog_post_dec 3768 def auto_sinusoid_baseline(self, mask=None, applyfft=None,2672 def auto_sinusoid_baseline(self, insitu=None, mask=None, applyfft=None, 3769 2673 fftmethod=None, fftthresh=None, 3770 addwn=None, rejwn=None, 3771 insitu=None, 3772 clipthresh=None, clipniter=None, 3773 edge=None, threshold=None, chan_avg_limit=None, 3774 plot=None, 3775 getresidual=None, 3776 showprogress=None, minnrow=None, 3777 outlog=None, 3778 blfile=None, csvformat=None, 3779 bltable=None): 2674 addwn=None, rejwn=None, clipthresh=None, 2675 clipniter=None, edge=None, threshold=None, 2676 chan_avg_limit=None, plot=None, 2677 getresidual=None, showprogress=None, 2678 minnrow=None, outlog=None, blfile=None, csvformat=None): 3780 2679 """\ 3781 2680 Return a scan which has been baselined (all rows) with sinusoidal … … 3785 2684 3786 2685 Parameters: 2686 insitu: if False a new scantable is returned. 2687 Otherwise, the scaling is done in-situ 2688 The default is taken from .asaprc (False) 3787 2689 mask: an optional mask retreived from scantable 3788 2690 applyfft: if True use some method, such as FFT, to find … … 3816 2718 wave numbers which are specified both in addwn 3817 2719 and rejwn will NOT be used. default is []. 3818 insitu: if False a new scantable is returned.3819 Otherwise, the scaling is done in-situ3820 The default is taken from .asaprc (False)3821 2720 clipthresh: Clipping threshold. (default is 3.0, unit: sigma) 3822 2721 clipniter: maximum number of iteration of 'clipthresh'-sigma … … 3858 2757 (default is "": no file/logger output) 3859 2758 csvformat: if True blfile is csv-formatted, default is False. 3860 bltable: name of a baseline table where fitting results3861 (coefficients, rms, etc.) are to be written.3862 if given, fitting results will NOT be output to3863 scantable (insitu=True) or None will be3864 returned (insitu=False).3865 (default is "": no table output)3866 2759 3867 2760 Example: … … 3882 2775 workscan = self.copy() 3883 2776 2777 #if mask is None: mask = [True for i in xrange(workscan.nchan())] 3884 2778 if mask is None: mask = [] 3885 2779 if applyfft is None: applyfft = True … … 3900 2794 if blfile is None: blfile = '' 3901 2795 if csvformat is None: csvformat = False 3902 if bltable is None: bltable = '' 3903 3904 sapplyfft = 'true' if applyfft else 'false' 3905 fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()]) 3906 3907 scsvformat = 'T' if csvformat else 'F' 2796 2797 if csvformat: 2798 scsvformat = "T" 2799 else: 2800 scsvformat = "F" 3908 2801 3909 2802 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method. 
3910 workscan._auto_sinusoid_baseline(mask, 3911 fftinfo, 2803 workscan._auto_sinusoid_baseline(mask, applyfft, 2804 fftmethod.lower(), 2805 str(fftthresh).lower(), 3912 2806 workscan._parse_wn(addwn), 3913 2807 workscan._parse_wn(rejwn), … … 3918 2812 pack_progress_params(showprogress, 3919 2813 minnrow), 3920 outlog, scsvformat+blfile , bltable)2814 outlog, scsvformat+blfile) 3921 2815 workscan._add_history("auto_sinusoid_baseline", varlist) 3922 3923 if bltable == '': 3924 if insitu: 3925 self._assign(workscan) 3926 else: 3927 return workscan 3928 else: 3929 if not insitu: 3930 return None 2816 2817 if insitu: 2818 self._assign(workscan) 2819 else: 2820 return workscan 3931 2821 3932 2822 except RuntimeError, e: … … 3934 2824 3935 2825 @asaplog_post_dec 3936 def cspline_baseline(self, mask=None, npiece=None, insitu=None,2826 def cspline_baseline(self, insitu=None, mask=None, npiece=None, 3937 2827 clipthresh=None, clipniter=None, plot=None, 3938 2828 getresidual=None, showprogress=None, minnrow=None, 3939 outlog=None, blfile=None, csvformat=None, 3940 bltable=None): 2829 outlog=None, blfile=None, csvformat=None): 3941 2830 """\ 3942 2831 Return a scan which has been baselined (all rows) by cubic spline … … 3944 2833 3945 2834 Parameters: 3946 mask: An optional mask3947 npiece: Number of pieces. (default is 2)3948 2835 insitu: If False a new scantable is returned. 3949 2836 Otherwise, the scaling is done in-situ 3950 2837 The default is taken from .asaprc (False) 2838 mask: An optional mask 2839 npiece: Number of pieces. (default is 2) 3951 2840 clipthresh: Clipping threshold. (default is 3.0, unit: sigma) 3952 2841 clipniter: maximum number of iteration of 'clipthresh'-sigma … … 3968 2857 (default is "": no file/logger output) 3969 2858 csvformat: if True blfile is csv-formatted, default is False. 3970 bltable: name of a baseline table where fitting results3971 (coefficients, rms, etc.) are to be written.3972 if given, fitting results will NOT be output to3973 scantable (insitu=True) or None will be3974 returned (insitu=False).3975 (default is "": no table output)3976 2859 3977 2860 Example: … … 3995 2878 workscan = self.copy() 3996 2879 2880 #if mask is None: mask = [True for i in xrange(workscan.nchan())] 3997 2881 if mask is None: mask = [] 3998 2882 if npiece is None: npiece = 2 … … 4005 2889 if outlog is None: outlog = False 4006 2890 if blfile is None: blfile = '' 4007 if csvformat is None: csvformat = False 4008 if bltable is None: bltable = '' 4009 4010 scsvformat = 'T' if csvformat else 'F' 2891 if csvformat is None: csvformat = False 2892 2893 if csvformat: 2894 scsvformat = "T" 2895 else: 2896 scsvformat = "F" 4011 2897 4012 2898 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method. 
4013 workscan._cspline_baseline(mask, npiece, 4014 clipthresh, clipniter, 2899 workscan._cspline_baseline(mask, npiece, clipthresh, clipniter, 4015 2900 getresidual, 4016 2901 pack_progress_params(showprogress, 4017 2902 minnrow), 4018 outlog, scsvformat+blfile, 4019 bltable) 2903 outlog, scsvformat+blfile) 4020 2904 workscan._add_history("cspline_baseline", varlist) 4021 4022 if bltable == '': 4023 if insitu: 4024 self._assign(workscan) 4025 else: 4026 return workscan 4027 else: 4028 if not insitu: 4029 return None 2905 2906 if insitu: 2907 self._assign(workscan) 2908 else: 2909 return workscan 4030 2910 4031 2911 except RuntimeError, e: … … 4033 2913 4034 2914 @asaplog_post_dec 4035 def auto_cspline_baseline(self, mask=None, npiece=None, insitu=None,2915 def auto_cspline_baseline(self, insitu=None, mask=None, npiece=None, 4036 2916 clipthresh=None, clipniter=None, 4037 2917 edge=None, threshold=None, chan_avg_limit=None, 4038 2918 getresidual=None, plot=None, 4039 2919 showprogress=None, minnrow=None, outlog=None, 4040 blfile=None, csvformat=None , bltable=None):2920 blfile=None, csvformat=None): 4041 2921 """\ 4042 2922 Return a scan which has been baselined (all rows) by cubic spline … … 4046 2926 4047 2927 Parameters: 4048 mask: an optional mask retreived from scantable4049 npiece: Number of pieces. (default is 2)4050 2928 insitu: if False a new scantable is returned. 4051 2929 Otherwise, the scaling is done in-situ 4052 2930 The default is taken from .asaprc (False) 2931 mask: an optional mask retreived from scantable 2932 npiece: Number of pieces. (default is 2) 4053 2933 clipthresh: Clipping threshold. (default is 3.0, unit: sigma) 4054 2934 clipniter: maximum number of iteration of 'clipthresh'-sigma … … 4090 2970 (default is "": no file/logger output) 4091 2971 csvformat: if True blfile is csv-formatted, default is False. 4092 bltable: name of a baseline table where fitting results4093 (coefficients, rms, etc.) are to be written.4094 if given, fitting results will NOT be output to4095 scantable (insitu=True) or None will be4096 returned (insitu=False).4097 (default is "": no table output)4098 2972 4099 2973 Example: … … 4129 3003 if blfile is None: blfile = '' 4130 3004 if csvformat is None: csvformat = False 4131 if bltable is None: bltable = '' 4132 4133 scsvformat = 'T' if csvformat else 'F' 3005 3006 if csvformat: 3007 scsvformat = "T" 3008 else: 3009 scsvformat = "F" 4134 3010 4135 3011 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method. 
4136 workscan._auto_cspline_baseline(mask, npiece, 4137 clip thresh, clipniter,3012 workscan._auto_cspline_baseline(mask, npiece, clipthresh, 3013 clipniter, 4138 3014 normalise_edge_param(edge), 4139 3015 threshold, … … 4141 3017 pack_progress_params(showprogress, 4142 3018 minnrow), 4143 outlog, 4144 scsvformat+blfile, 4145 bltable) 3019 outlog, scsvformat+blfile) 4146 3020 workscan._add_history("auto_cspline_baseline", varlist) 4147 4148 if bltable == '': 4149 if insitu: 4150 self._assign(workscan) 4151 else: 4152 return workscan 4153 else: 4154 if not insitu: 4155 return None 3021 3022 if insitu: 3023 self._assign(workscan) 3024 else: 3025 return workscan 4156 3026 4157 3027 except RuntimeError, e: … … 4159 3029 4160 3030 @asaplog_post_dec 4161 def chebyshev_baseline(self, mask=None, order=None, insitu=None,3031 def chebyshev_baseline(self, insitu=None, mask=None, order=None, 4162 3032 clipthresh=None, clipniter=None, plot=None, 4163 3033 getresidual=None, showprogress=None, minnrow=None, 4164 outlog=None, blfile=None, csvformat=None, 4165 bltable=None): 3034 outlog=None, blfile=None, csvformat=None): 4166 3035 """\ 4167 3036 Return a scan which has been baselined (all rows) by Chebyshev polynomials. 4168 3037 4169 3038 Parameters: 4170 mask: An optional mask4171 order: the maximum order of Chebyshev polynomial (default is 5)4172 3039 insitu: If False a new scantable is returned. 4173 3040 Otherwise, the scaling is done in-situ 4174 3041 The default is taken from .asaprc (False) 3042 mask: An optional mask 3043 order: the maximum order of Chebyshev polynomial (default is 5) 4175 3044 clipthresh: Clipping threshold. (default is 3.0, unit: sigma) 4176 3045 clipniter: maximum number of iteration of 'clipthresh'-sigma … … 4192 3061 (default is "": no file/logger output) 4193 3062 csvformat: if True blfile is csv-formatted, default is False. 4194 bltable: name of a baseline table where fitting results4195 (coefficients, rms, etc.) are to be written.4196 if given, fitting results will NOT be output to4197 scantable (insitu=True) or None will be4198 returned (insitu=False).4199 (default is "": no table output)4200 3063 4201 3064 Example: … … 4219 3082 workscan = self.copy() 4220 3083 3084 #if mask is None: mask = [True for i in xrange(workscan.nchan())] 4221 3085 if mask is None: mask = [] 4222 3086 if order is None: order = 5 … … 4229 3093 if outlog is None: outlog = False 4230 3094 if blfile is None: blfile = '' 4231 if csvformat is None: csvformat = False 4232 if bltable is None: bltable = '' 4233 4234 scsvformat = 'T' if csvformat else 'F' 3095 if csvformat is None: csvformat = False 3096 3097 if csvformat: 3098 scsvformat = "T" 3099 else: 3100 scsvformat = "F" 4235 3101 4236 3102 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method. 
4237 workscan._chebyshev_baseline(mask, order, 4238 clipthresh, clipniter, 3103 workscan._chebyshev_baseline(mask, order, clipthresh, clipniter, 4239 3104 getresidual, 4240 3105 pack_progress_params(showprogress, 4241 3106 minnrow), 4242 outlog, scsvformat+blfile, 4243 bltable) 3107 outlog, scsvformat+blfile) 4244 3108 workscan._add_history("chebyshev_baseline", varlist) 4245 4246 if bltable == '': 4247 if insitu: 4248 self._assign(workscan) 4249 else: 4250 return workscan 4251 else: 4252 if not insitu: 4253 return None 3109 3110 if insitu: 3111 self._assign(workscan) 3112 else: 3113 return workscan 4254 3114 4255 3115 except RuntimeError, e: … … 4257 3117 4258 3118 @asaplog_post_dec 4259 def auto_chebyshev_baseline(self, mask=None, order=None, insitu=None,3119 def auto_chebyshev_baseline(self, insitu=None, mask=None, order=None, 4260 3120 clipthresh=None, clipniter=None, 4261 3121 edge=None, threshold=None, chan_avg_limit=None, 4262 3122 getresidual=None, plot=None, 4263 3123 showprogress=None, minnrow=None, outlog=None, 4264 blfile=None, csvformat=None , bltable=None):3124 blfile=None, csvformat=None): 4265 3125 """\ 4266 3126 Return a scan which has been baselined (all rows) by Chebyshev polynomials. … … 4269 3129 4270 3130 Parameters: 4271 mask: an optional mask retreived from scantable4272 order: the maximum order of Chebyshev polynomial (default is 5)4273 3131 insitu: if False a new scantable is returned. 4274 3132 Otherwise, the scaling is done in-situ 4275 3133 The default is taken from .asaprc (False) 3134 mask: an optional mask retreived from scantable 3135 order: the maximum order of Chebyshev polynomial (default is 5) 4276 3136 clipthresh: Clipping threshold. (default is 3.0, unit: sigma) 4277 3137 clipniter: maximum number of iteration of 'clipthresh'-sigma … … 4313 3173 (default is "": no file/logger output) 4314 3174 csvformat: if True blfile is csv-formatted, default is False. 4315 bltable: name of a baseline table where fitting results4316 (coefficients, rms, etc.) are to be written.4317 if given, fitting results will NOT be output to4318 scantable (insitu=True) or None will be4319 returned (insitu=False).4320 (default is "": no table output)4321 3175 4322 3176 Example: … … 4337 3191 workscan = self.copy() 4338 3192 3193 #if mask is None: mask = [True for i in xrange(workscan.nchan())] 4339 3194 if mask is None: mask = [] 4340 3195 if order is None: order = 5 … … 4351 3206 if blfile is None: blfile = '' 4352 3207 if csvformat is None: csvformat = False 4353 if bltable is None: bltable = '' 4354 4355 scsvformat = 'T' if csvformat else 'F' 3208 3209 if csvformat: 3210 scsvformat = "T" 3211 else: 3212 scsvformat = "F" 4356 3213 4357 3214 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method. 
4358 workscan._auto_chebyshev_baseline(mask, order, 4359 clip thresh, clipniter,3215 workscan._auto_chebyshev_baseline(mask, order, clipthresh, 3216 clipniter, 4360 3217 normalise_edge_param(edge), 4361 3218 threshold, … … 4363 3220 pack_progress_params(showprogress, 4364 3221 minnrow), 4365 outlog, scsvformat+blfile, 4366 bltable) 3222 outlog, scsvformat+blfile) 4367 3223 workscan._add_history("auto_chebyshev_baseline", varlist) 4368 4369 if bltable == '': 4370 if insitu: 4371 self._assign(workscan) 4372 else: 4373 return workscan 4374 else: 4375 if not insitu: 4376 return None 3224 3225 if insitu: 3226 self._assign(workscan) 3227 else: 3228 return workscan 4377 3229 4378 3230 except RuntimeError, e: … … 4380 3232 4381 3233 @asaplog_post_dec 4382 def poly_baseline(self, mask=None, order=None, insitu=None, 4383 clipthresh=None, clipniter=None, plot=None, 3234 def poly_baseline(self, mask=None, order=None, insitu=None, plot=None, 4384 3235 getresidual=None, showprogress=None, minnrow=None, 4385 outlog=None, blfile=None, csvformat=None, 4386 bltable=None): 3236 outlog=None, blfile=None, csvformat=None): 4387 3237 """\ 4388 3238 Return a scan which has been baselined (all rows) by a polynomial. 4389 3239 Parameters: 4390 mask: an optional mask4391 order: the order of the polynomial (default is 0)4392 3240 insitu: if False a new scantable is returned. 4393 3241 Otherwise, the scaling is done in-situ 4394 3242 The default is taken from .asaprc (False) 4395 clipthresh: Clipping threshold. (default is 3.0, unit: sigma) 4396 clipniter: maximum number of iteration of 'clipthresh'-sigma 4397 clipping (default is 0) 3243 mask: an optional mask 3244 order: the order of the polynomial (default is 0) 4398 3245 plot: plot the fit and the residual. In this each 4399 3246 indivual fit has to be approved, by typing 'y' … … 4411 3258 (default is "": no file/logger output) 4412 3259 csvformat: if True blfile is csv-formatted, default is False. 4413 bltable: name of a baseline table where fitting results4414 (coefficients, rms, etc.) 
are to be written.4415 if given, fitting results will NOT be output to4416 scantable (insitu=True) or None will be4417 returned (insitu=False).4418 (default is "": no table output)4419 3260 4420 3261 Example: … … 4434 3275 workscan = self.copy() 4435 3276 3277 #if mask is None: mask = [True for i in \ 3278 # xrange(workscan.nchan())] 4436 3279 if mask is None: mask = [] 4437 3280 if order is None: order = 0 4438 if clipthresh is None: clipthresh = 3.04439 if clipniter is None: clipniter = 04440 3281 if plot is None: plot = False 4441 3282 if getresidual is None: getresidual = True … … 4443 3284 if minnrow is None: minnrow = 1000 4444 3285 if outlog is None: outlog = False 4445 if blfile is None: blfile = ''3286 if blfile is None: blfile = "" 4446 3287 if csvformat is None: csvformat = False 4447 if bltable is None: bltable = '' 4448 4449 scsvformat = 'T' if csvformat else 'F' 3288 3289 if csvformat: 3290 scsvformat = "T" 3291 else: 3292 scsvformat = "F" 4450 3293 4451 3294 if plot: … … 4511 3354 blf.close() 4512 3355 else: 4513 workscan._poly_baseline(mask, order, 4514 clipthresh, clipniter, # 4515 getresidual, 3356 workscan._poly_baseline(mask, order, getresidual, 4516 3357 pack_progress_params(showprogress, 4517 3358 minnrow), 4518 outlog, scsvformat+blfile, 4519 bltable) # 3359 outlog, scsvformat+blfile) 4520 3360 4521 3361 workscan._add_history("poly_baseline", varlist) … … 4530 3370 4531 3371 @asaplog_post_dec 4532 def auto_poly_baseline(self, mask=None, order=None, insitu=None, 4533 clipthresh=None, clipniter=None, 4534 edge=None, threshold=None, chan_avg_limit=None, 4535 getresidual=None, plot=None, 4536 showprogress=None, minnrow=None, outlog=None, 4537 blfile=None, csvformat=None, bltable=None): 3372 def auto_poly_baseline(self, mask=None, order=None, edge=None, 3373 threshold=None, chan_avg_limit=None, 3374 plot=None, insitu=None, 3375 getresidual=None, showprogress=None, 3376 minnrow=None, outlog=None, blfile=None, csvformat=None): 4538 3377 """\ 4539 3378 Return a scan which has been baselined (all rows) by a polynomial. … … 4542 3381 4543 3382 Parameters: 4544 mask: an optional mask retreived from scantable4545 order: the order of the polynomial (default is 0)4546 3383 insitu: if False a new scantable is returned. 4547 3384 Otherwise, the scaling is done in-situ 4548 3385 The default is taken from .asaprc (False) 4549 clipthresh: Clipping threshold. (default is 3.0, unit: sigma) 4550 clipniter: maximum number of iteration of 'clipthresh'-sigma 4551 clipping (default is 0) 3386 mask: an optional mask retreived from scantable 3387 order: the order of the polynomial (default is 0) 4552 3388 edge: an optional number of channel to drop at 4553 3389 the edge of spectrum. If only one value is … … 4585 3421 (default is "": no file/logger output) 4586 3422 csvformat: if True blfile is csv-formatted, default is False. 4587 bltable: name of a baseline table where fitting results4588 (coefficients, rms, etc.) 
are to be written.4589 if given, fitting results will NOT be output to4590 scantable (insitu=True) or None will be4591 returned (insitu=False).4592 (default is "": no table output)4593 3423 4594 3424 Example: … … 4606 3436 workscan = self.copy() 4607 3437 3438 #if mask is None: mask = [True for i in xrange(workscan.nchan())] 4608 3439 if mask is None: mask = [] 4609 3440 if order is None: order = 0 4610 if clipthresh is None: clipthresh = 3.04611 if clipniter is None: clipniter = 04612 3441 if edge is None: edge = (0, 0) 4613 3442 if threshold is None: threshold = 3 … … 4620 3449 if blfile is None: blfile = '' 4621 3450 if csvformat is None: csvformat = False 4622 if bltable is None: bltable = '' 4623 4624 scsvformat = 'T' if csvformat else 'F' 3451 3452 if csvformat: 3453 scsvformat = "T" 3454 else: 3455 scsvformat = "F" 4625 3456 4626 3457 edge = normalise_edge_param(edge) … … 4692 3523 if outblfile: blf.close() 4693 3524 else: 4694 workscan._auto_poly_baseline(mask, order, 4695 clipthresh, clipniter, 4696 edge, threshold, 3525 workscan._auto_poly_baseline(mask, order, edge, threshold, 4697 3526 chan_avg_limit, getresidual, 4698 3527 pack_progress_params(showprogress, 4699 3528 minnrow), 4700 outlog, scsvformat+blfile ,4701 bltable) 3529 outlog, scsvformat+blfile) 3530 4702 3531 workscan._add_history("auto_poly_baseline", varlist) 4703 4704 if bltable == '': 4705 if insitu: 4706 self._assign(workscan) 4707 else: 4708 return workscan 4709 else: 4710 if not insitu: 4711 return None 3532 3533 if insitu: 3534 self._assign(workscan) 3535 else: 3536 return workscan 4712 3537 4713 3538 except RuntimeError, e: … … 4819 3644 self._math._setinsitu(insitu) 4820 3645 varlist = vars() 4821 s = scantable(self._math._unaryop(self, offset, "ADD", False , False))3646 s = scantable(self._math._unaryop(self, offset, "ADD", False)) 4822 3647 s._add_history("add", varlist) 4823 3648 if insitu: … … 4842 3667 tsys: if True (default) then apply the operation to Tsys 4843 3668 as well as the data 3669 4844 3670 """ 4845 3671 if insitu is None: insitu = rcParams['insitu'] … … 4852 3678 numpy.ndarray): 4853 3679 from asapmath import _array2dOp 4854 s = _array2dOp( self, factor, "MUL", tsys, insitu , True)3680 s = _array2dOp( self, factor, "MUL", tsys, insitu ) 4855 3681 else: 4856 3682 s = scantable( self._math._arrayop( self, factor, 4857 "MUL", tsys , True) )3683 "MUL", tsys ) ) 4858 3684 else: 4859 s = scantable(self._math._unaryop(self, factor, "MUL", tsys , True))3685 s = scantable(self._math._unaryop(self, factor, "MUL", tsys)) 4860 3686 s._add_history("scale", varlist) 4861 3687 if insitu: … … 4904 3730 self._add_history("set_sourcetype", varlist) 4905 3731 4906 4907 def set_sourcename(self, name):4908 varlist = vars()4909 self._setsourcename(name)4910 self._add_history("set_sourcename", varlist)4911 4912 3732 @asaplog_post_dec 4913 3733 @preserve_selection … … 4946 3766 s = None 4947 3767 if mode.lower() == "paired": 4948 from asap._asap import srctype4949 3768 sel = self.get_selection() 4950 #sel.set_query("SRCTYPE==psoff") 4951 sel.set_types(srctype.psoff) 3769 sel.set_query("SRCTYPE==psoff") 4952 3770 self.set_selection(sel) 4953 3771 offs = self.copy() 4954 #sel.set_query("SRCTYPE==pson") 4955 sel.set_types(srctype.pson) 3772 sel.set_query("SRCTYPE==pson") 4956 3773 self.set_selection(sel) 4957 3774 ons = self.copy() … … 5070 3887 if other == 0.0: 5071 3888 raise ZeroDivisionError("Dividing by zero is not recommended") 5072 s = scantable(self._math._unaryop(self, other, mode, False , True))3889 s = 
scantable(self._math._unaryop(self, other, mode, False)) 5073 3890 elif isinstance(other, list) or isinstance(other, numpy.ndarray): 5074 3891 if isinstance(other[0], list) \ 5075 3892 or isinstance(other[0], numpy.ndarray): 5076 3893 from asapmath import _array2dOp 5077 s = _array2dOp(self, other, mode, False) 5078 else: 5079 s = scantable(self._math._arrayop(self, other, mode, False, True)) 3894 s = _array2dOp( self, other, mode, False ) 3895 else: 3896 s = scantable( self._math._arrayop( self, other, 3897 mode, False ) ) 5080 3898 else: 5081 3899 raise TypeError("Other input is not a scantable or float value") … … 5110 3928 self.set_selection(basesel+sel) 5111 3929 nans = numpy.isnan(self._getspectrum(0)) 5112 if numpy.any(nans): 5113 bnans = [ bool(v) for v in nans] 5114 self.flag(bnans) 5115 5116 self.set_selection(basesel) 3930 if numpy.any(nans): 3931 bnans = [ bool(v) for v in nans] 3932 self.flag(bnans) 5117 3933 5118 3934 def get_row_selector(self, rowno): -
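The scantable.py hunks above change the baseline-fitting methods (sinusoid/cspline/chebyshev/poly and their auto_* variants): on the r3112 side they take clipping parameters and a bltable argument, and when bltable is given the fitting results are written to a baseline table instead of being returned. The following is a minimal usage sketch, not part of the changeset, assuming an ASAP/CASA session and a hypothetical scantable file 'orion.asap'; only parameters documented in the hunks above are used.

    # Sketch of the r3112-side baseline interface described in the docstrings above.
    from asap import scantable

    scan = scantable('orion.asap', average=False)   # hypothetical file name

    # bltable left at its default '': the fit is subtracted and, with
    # insitu=False, a new baselined scantable is returned.
    baselined = scan.auto_cspline_baseline(npiece=3, clipthresh=3.0, clipniter=2,
                                           edge=(10, 10), insitu=False)

    # bltable given: fitting results (coefficients, rms, etc.) go to the named
    # baseline table instead, and with insitu=False the method returns None.
    result = scan.auto_cspline_baseline(npiece=3, bltable='orion_bl.tbl',
                                        insitu=False)
    assert result is None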
trunk/python/selector.py
r3112 r2704 1 1 import re 2 import math3 import string4 2 from asap._asap import selector as _selector, srctype 5 3 from asap.utils import unique, _to_list … … 202 200 raise TypeError('Unknown row number type. Use lists of integers.') 203 201 204 def set_msselection_field(self, selection):205 """206 Set a field selection in msselection syntax. The msselection207 suppports the following syntax:208 209 pattern match:210 - UNIX style pattern match for source name using '*'211 (compatible with set_name)212 213 field id selection:214 - simple number in string ('0', '1', etc.)215 - range specification using '~' ('0~1', etc.)216 - range specification using '>' or '<' in combination217 with '=' ('>=1', '<3', etc.)218 219 comma separated multiple selection:220 - selections can be combined by using ',' ('0,>1',221 'mysource*,2~4', etc.)222 """223 selection_list = map(string.strip, selection.split(','))224 query_list = list(self.generate_query(selection_list))225 if len(query_list) > 0:226 original_query = self.get_query()227 if len(original_query) == 0 or re.match('.*(SRC|FIELD)NAME.*',original_query):228 query = 'SELECT FROM $1 WHERE ' + ' || '.join(query_list)229 else:230 query = 'SELECT FROM $1 WHERE (' + original_query + ') && (' + ' || '.join(query_list) + ')'231 self._settaql(query)232 233 def generate_query(self, selection_list):234 for s in selection_list:235 if s.isdigit() or re.match('^[<>]=?[0-9]*$', s) or \236 re.match('^[0-9]+~[0-9]+$', s):237 #print '"%s" is ID selection using < or <='%(s)238 a = FieldIdRegexGenerator(s)239 yield '(%s)'%(a.get_regex())240 elif len(s) > 0:241 #print '"%s" is UNIX style pattern match'%(s)242 yield '(SRCNAME == pattern(\'%s\'))'%(s)243 244 202 def get_scans(self): 245 203 return list(self._getscans()) … … 317 275 union.set_query(qs) 318 276 return union 319 320 class FieldIdRegexGenerator(object):321 def __init__(self, pattern):322 if pattern.isdigit():323 self.regex = 'FIELDNAME == regex(\'.+__%s$\')'%(pattern)324 else:325 self.regex = None326 ineq = None327 if pattern.find('<') >= 0:328 ineq = '<'329 s = pattern.strip().lstrip(ineq).lstrip('=')330 if not s.isdigit():331 raise RuntimeError('Invalid syntax: %s'%(pattern))332 self.id = int(s) + (-1 if pattern.find('=') < 0 else 0)333 self.template = string.Template('FIELDNAME == regex(\'.+__${reg}$\')')334 elif pattern.find('>') >= 0:335 ineq = '>'336 s = pattern.strip().lstrip(ineq).lstrip('=')337 if not s.isdigit():338 raise RuntimeError('Invalid syntax: %s'%(pattern))339 self.id = int(s) + (-1 if pattern.find('=') >= 0 else 0)340 self.template = string.Template('FIELDNAME == regex(\'.+__[0-9]+$\') && FIELDNAME != regex(\'.+__${reg}$\')')341 elif pattern.find('~') >= 0:342 s = map(string.strip, pattern.split('~'))343 if len(s) == 2 and s[0].isdigit() and s[1].isdigit():344 id0 = int(s[0])345 id1 = int(s[1])346 if id0 == 0:347 self.id = id1348 self.template = string.Template('FIELDNAME == regex(\'.+__${reg}$\')')349 else:350 self.id = [id0-1,id1]351 self.template = string.Template('FIELDNAME == regex(\'.+__${reg}$\') && FIELDNAME != regex(\'.+__${optreg}$\')')352 else:353 raise RuntimeError('Invalid syntax: %s'%(pattern))354 else:355 raise RuntimeError('Invalid syntax: %s'%(pattern))356 #print 'self.id=',self.id357 358 def get_regex(self):359 if self.regex is not None:360 # 'X'361 return self.regex362 elif isinstance(self.id, list):363 # 'X~Y'364 return self.template.safe_substitute(reg=self.__compile(self.id[1]),365 optreg=self.__compile(self.id[0]))366 else:367 # '<(=)X' or '>(=)X'368 return 
self.template.safe_substitute(reg=self.__compile(self.id))369 370 def __compile(self, idx):371 pattern = ''372 if idx >= 0:373 numerics = map(int,list(str(idx)))374 #numerics.reverse()375 num_digits = len(numerics)376 #print 'numerics=',numerics377 if num_digits == 1:378 if numerics[0] == 0:379 pattern = '0'380 else:381 pattern = '[0-%s]'%(numerics[0])382 elif num_digits == 2:383 pattern = '(%s)'%('|'.join(384 list(self.__gen_two_digit_pattern(numerics))))385 elif num_digits == 3:386 pattern = '(%s)'%('|'.join(387 list(self.__gen_three_digit_pattern(numerics))))388 else:389 raise RuntimeError('ID > 999 is not supported')390 else:391 raise RuntimeError('ID must be >= 0')392 return pattern393 394 def __gen_two_digit_pattern(self, numerics):395 assert len(numerics) == 2396 yield '[0-9]'397 if numerics[0] == 2:398 yield '1[0-9]'399 elif numerics[0] > 2:400 yield '[1-%s][0-9]'%(numerics[0]-1)401 if numerics[1] == 0:402 yield '%s%s'%(numerics[0],numerics[1])403 else:404 yield '%s[0-%s]'%(numerics[0],numerics[1])405 406 def __gen_three_digit_pattern(self, numerics):407 assert len(numerics) == 3408 yield '[0-9]'409 yield '[1-9][0-9]'410 if numerics[0] == 2:411 yield '1[0-9][0-9]'412 elif numerics[0] > 2:413 yield '[1-%s][0-9][0-9]'%(numerics[0]-1)414 if numerics[1] == 0:415 if numerics[2] == 0:416 yield '%s00'%(numerics[0])417 else:418 yield '%s0[0-%s]'%(numerics[0],numerics[2])419 else:420 if numerics[1] > 1:421 yield '%s[0-%s][0-9]'%(numerics[0],numerics[1]-1)422 elif numerics[1] == 1:423 yield '%s0[0-9]'%(numerics[0])424 if numerics[0] == 0:425 yield '%s%s%s'%(numerics[0],numerics[1],numerics[2])426 else:427 yield '%s%s[0-%s]'%(numerics[0],numerics[1],numerics[2]) -
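The selector.py hunk above adds set_msselection_field() together with the FieldIdRegexGenerator helper, which translates field-id expressions ('0', '0~3', '>=1', '<3') into TAQL regexes on FIELDNAME. A short sketch of the syntax described in that docstring follows; the import path and the scantable file name are assumptions, not part of the changeset.

    from asap import scantable, selector

    scan = scantable('orion.asap', average=False)   # hypothetical file name
    sel = selector()

    # Comma-separated selection mixing a UNIX-style source-name pattern with a
    # field-id range, as allowed by the set_msselection_field docstring above.
    # Internally '0~3' is expanded into a FIELDNAME regex by FieldIdRegexGenerator.
    sel.set_msselection_field('M42*, 0~3')
    scan.set_selection(sel)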
trunk/python/utils.py
r3112 r2704 16 16 17 17 def _is_sequence_or_number(param, ptype=int): 18 import numpy 19 #if isinstance(param,tuple) or isinstance(param,list): 20 if type(param) in (tuple, list, numpy.ndarray): 18 if isinstance(param,tuple) or isinstance(param,list): 21 19 if len(param) == 0: return True # empty list 22 20 out = True … … 33 31 else: return [param] 34 32 if _is_sequence_or_number(param, ptype): 35 return list(param)33 return param 36 34 return None 37 35
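The utils.py hunk changes _is_sequence_or_number() and _to_list() so that, on the r3112 side, numpy arrays are accepted as sequences and sequence input is always returned as a plain list. A small sketch of the behavioural difference, assuming the helpers are importable from asap.utils as shown above:

    import numpy
    from asap.utils import _to_list

    # r3112 side: numpy arrays pass the sequence check and are converted with
    # list(param); r2704 accepts only tuples/lists and returns them unchanged.
    print _to_list(numpy.array([1, 2, 3]))   # [1, 2, 3] on r3112; None on r2704
    print _to_list((1, 2, 3))                # [1, 2, 3] on r3112; (1, 2, 3) on r2704
    print _to_list(4)                        # [4] on both sides (unchanged code path)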