Files
triton/tune/model.py
Philippe Tillet e7cabf65ac Tuning: Merged tune branch.
- Much cleaner and more concise source
- Better exceptions handling
- Checks local minima to see if retuning is needed.

Resolved conflicts:
	bench/blas.cpp
	include/isaac/backend/templates/mproduct.h
	include/isaac/driver/buffer.h
	lib/array.cpp
	lib/backend/templates/mproduct.cpp
	lib/driver/buffer.cpp
	python/setup.py
	tune/pysrc/autotune.py
	tune/pysrc/dataset.py
	tune/pysrc/misc_tools.py
2015-06-28 17:53:16 -07:00

49 lines
1.5 KiB
Python

from sklearn import ensemble
import numpy as np
def gmean(a, axis=0, dtype=None):
    """Return the geometric mean of *a* along *axis*.

    Computed as exp(mean(log(a))). When *dtype* is given, the input is
    re-wrapped with that dtype before the log transform; masked arrays
    keep their mask.
    """
    if not isinstance(a, np.ndarray):
        # Plain sequence: convert it to an ndarray first.
        arr = np.array(a, dtype=dtype)
    elif dtype:
        # Already an ndarray, but the caller requested a specific dtype;
        # preserve masked arrays as masked arrays.
        if isinstance(a, np.ma.MaskedArray):
            arr = np.ma.asarray(a, dtype=dtype)
        else:
            arr = np.asarray(a, dtype=dtype)
    else:
        arr = a
    return np.exp(np.log(arr).mean(axis=axis))
def nrmse(y_ground, y):
    """Normalized root-mean-square error of predictions *y* vs *y_ground*.

    The RMSD is normalized by the range (max - min) of the ground-truth
    values. When that range is degenerate — a single sample, or a
    constant ground truth — the raw RMSD is returned instead, which
    avoids the division by zero the original code hit for constant
    ground-truth vectors.
    """
    N = y.size
    rmsd = np.sqrt(np.sum((y_ground - y)**2) / N)
    if len(y_ground) > 1:
        spread = np.max(y_ground) - np.min(y_ground)
        # Guard: a constant ground truth would otherwise divide by zero.
        if spread > 0:
            return rmsd / spread
    return rmsd
def train(X, Y, profiles):
    """Grid-search RandomForest regressors and return the best one.

    X holds feature rows; Y holds, per row, the measured score of each
    profile. The data is shuffled, split 90/10 into train/CV sets, and a
    forest is fit for every (n_estimators, max_depth) pair in
    [1, min(nsamples, 19)]^2. Each model is scored by the NRMSE between
    the true per-sample optimum and the score of the profile the model
    predicts as best.

    Returns (best_model, best_model_nrmse). *profiles* is accepted for
    interface compatibility but unused here.
    """
    X, Y = np.array(X), np.array(Y)
    nsamples = X.shape[0]
    # Shuffle rows so the train/CV split is not order-dependent.
    order = np.random.permutation(nsamples)
    X, Y = X[order, :], Y[order, :]
    # 90/10 train / cross-validation split.
    cut = int(0.9 * nsamples)
    XTr, YTr = X[:cut, :], Y[:cut, :]
    XCv, YCv = X[cut:, :], Y[cut:, :]
    scores = {}
    limit = min(nsamples + 1, 20)
    for n_estimators in range(1, limit):
        for depth in range(1, limit):
            model = ensemble.RandomForestRegressor(
                n_estimators, max_depth=depth).fit(XTr, YTr)
            # Index of the profile the model predicts as best per CV row...
            best_idx = np.argmax(model.predict(XCv), axis=1)
            chosen = np.array([YCv[row, best_idx[row]]
                               for row in range(best_idx.size)])
            # ...scored against the true per-row optimum.
            optimum = np.max(YCv[:, :], axis=1)
            scores[model] = nrmse(optimum, chosen)
    best = min(scores, key=scores.get)
    return best, scores[best]