Tuner: Removed Sklearn + scipy dependency
BIN  python/isaac/external/__init__.pyc (vendored; binary file not shown)
BIN  python/isaac/external/tree.pyc (vendored; binary file not shown)
@@ -112,7 +112,8 @@ def main():
     library_dirs = [config['lib'] for config in [opencl_config, cuda_config] if config is not None]

     #Include directories
-    include =' src/include src/lib/external'.split() + ['external/boost/', 'external/boost/boost/', os.path.join(find_module("numpy")[1], "core", "include")]
+    numpy_include = os.path.join(find_module("numpy")[1], "core", "include")
+    include =' src/include src/lib/external'.split() + ['external/boost/', 'external/boost/boost/', numpy_include]

     #Source files
     src = 'src/lib/symbolic/preset.cpp src/lib/symbolic/execute.cpp src/lib/symbolic/io.cpp src/lib/symbolic/expression.cpp src/lib/array.cpp src/lib/value_scalar.cpp src/lib/driver/backend.cpp src/lib/driver/device.cpp src/lib/driver/kernel.cpp src/lib/driver/buffer.cpp src/lib/driver/platform.cpp src/lib/driver/check.cpp src/lib/driver/program.cpp src/lib/driver/command_queue.cpp src/lib/driver/program_cache.cpp src/lib/driver/context.cpp src/lib/driver/event.cpp src/lib/driver/ndrange.cpp src/lib/driver/handle.cpp src/lib/exception/unknown_datatype.cpp src/lib/exception/operation_not_supported.cpp src/lib/profiles/presets.cpp src/lib/profiles/profiles.cpp src/lib/profiles/predictors/random_forest.cpp src/lib/kernels/templates/gemv.cpp src/lib/kernels/templates/axpy.cpp src/lib/kernels/templates/gemm.cpp src/lib/kernels/templates/ger.cpp src/lib/kernels/templates/dot.cpp src/lib/kernels/templates/base.cpp src/lib/kernels/mapped_object.cpp src/lib/kernels/stream.cpp src/lib/kernels/parse.cpp src/lib/kernels/keywords.cpp src/lib/kernels/binder.cpp src/lib/wrap/clBLAS.cpp '.split() + [os.path.join('src', 'bind', sf) for sf in ['_isaac.cpp', 'core.cpp', 'driver.cpp', 'kernels.cpp', 'exceptions.cpp']]
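For reference, the factored-out numpy_include value is just the NumPy headers directory, which the new external._tree extension further down reuses; a rough illustration (the concrete path is machine-dependent and shown only as an example):

    # Illustration only: imp.find_module("numpy")[1] is the numpy package directory,
    # so numpy_include ends up pointing at something like
    #   /usr/lib/python2.7/site-packages/numpy/core/include   (example path)
    import os
    from imp import find_module
    numpy_include = os.path.join(find_module("numpy")[1], "core", "include")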
@@ -129,16 +130,10 @@ def main():
     src= [f for f in src if not f.endswith("once_atomic.cpp")]

     #Setup
-    setup(
-                name='isaac',
-                version='1.0',
-                description="Input-specific architecture-aware computations",
-                author='Philippe Tillet',
-                author_email='ptillet@g.harvard.edu',
-                license='MPL 2.0',
-                packages=["isaac"],
-                ext_package="isaac",
-                ext_modules=[Extension(
+    extensions = []
+
+    #ISAAC
+    extensions += [Extension(
                     '_isaac',src,
                     extra_compile_args= backend_defines + ['-std=c++11', '-Wno-unused-function', '-Wno-unused-local-typedefs', '-Wno-sign-compare', '-Wno-attributes', '-DBOOST_PYTHON_SOURCE '],
                     extra_link_args=['-Wl,-soname=_isaac.so'],
@@ -146,7 +141,24 @@ def main():
                     include_dirs=include,
                     library_dirs=library_dirs,
                     libraries=libraries
-                )],
+                )]
+
+    #External
+    extensions += [Extension('external._tree',
+                    ['isaac/external/_tree.c'],
+                    extra_compile_args = ["-O3", "-Wall"],
+                    include_dirs = ['.', numpy_include])]
+
+    setup(
+                name='isaac',
+                version='1.0',
+                description="Input-specific architecture-aware computations",
+                author='Philippe Tillet',
+                author_email='ptillet@g.harvard.edu',
+                license='MPL 2.0',
+                packages=["isaac", "isaac.external"],
+                ext_package="isaac",
+                ext_modules=extensions,
                 cmdclass={'build_py': build_py, 'build_ext': build_ext_subclass},
                 classifiers=[
                     'Environment :: Console',
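With the extension modules now collected in a list and the vendored isaac.external package installed alongside, one way to sanity-check the restructuring after a build is simply to import the tuner's dependencies and confirm nothing pulls in sklearn or scipy; a hypothetical smoke test, not part of this commit:

    # Hypothetical check after building the extensions: both the main binding and
    # the vendored forest should import without sklearn/scipy being loaded.
    import sys
    import isaac
    from isaac.external.forest import RandomForestRegressor
    assert 'sklearn' not in sys.modules and 'scipy' not in sys.modules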
@@ -1,4 +1,4 @@
|
||||
from sklearn import ensemble
|
||||
from isaac.external.forest import RandomForestRegressor
|
||||
import numpy as np
|
||||
|
||||
def gmean(a, axis=0, dtype=None):
|
||||
@@ -39,7 +39,7 @@ def train(X, Y, profiles):
|
||||
nrmses = {}
|
||||
for N in range(1,min(M+1,20)):
|
||||
for depth in range(1,min(M+1,20)):
|
||||
clf = ensemble.RandomForestRegressor(N, max_depth=depth).fit(XTr, YTr)
|
||||
clf = RandomForestRegressor(N, max_depth=depth).fit(XTr, YTr)
|
||||
t = np.argmax(clf.predict(XCv), axis = 1)
|
||||
y = np.array([YCv[i,t[i]] for i in range(t.size)])
|
||||
ground = np.max(YCv[:,:], axis=1)
|
||||
|
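For the swap above to be a drop-in replacement, the vendored isaac.external.forest.RandomForestRegressor only has to cover the small slice of sklearn's interface that the tuner actually touches; a minimal sketch of that assumed interface (not the actual vendored implementation):

    import numpy as np

    class RandomForestRegressor(object):
        """Assumed interface: __init__(n_estimators, max_depth), fit() returning self,
        predict(), and an estimators_ list whose entries expose a tree_ attribute."""
        def __init__(self, n_estimators, max_depth=None):
            self.n_estimators = n_estimators
            self.max_depth = max_depth
            self.estimators_ = []          # filled by fit(); each entry carries .tree_
        def fit(self, X, Y):
            # ... grow self.n_estimators regression trees on (X, Y) ...
            return self                    # sklearn-style chaining: .fit(X, Y).predict(x)
        def predict(self, X):
            return np.mean([e.predict(X) for e in self.estimators_], axis=0)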
@@ -3,8 +3,8 @@ from math import log, isinf
 from itertools import chain, product
 from numpy import argsort, argmax
 from operator import mul
-from sklearn import ensemble
 import isaac as sc
+from isaac.external.forest import RandomForestRegressor
 import optimize, tools, model

 from json import encoder
@@ -82,8 +82,8 @@ def tune(device, operation, json_path):
         if nparams==1:
             predicted = profiles[0]
         else:
-            clf = ensemble.RandomForestRegressor(min(10, idx+1), max_depth=min(10, idx+1)).fit(X, Y)
-            #clf, nrmse = profile.train(X, Y, profiles)
+            clf = RandomForestRegressor(min(10, idx+1), max_depth=min(10, idx+1)).fit(X, Y)
+            #clf, nrmse = model.train(X, Y, profiles)
             predperf = clf.predict(x)[0]
             best = (-predperf).argsort()[:5]
             perf = [performance(x, tools.benchmark(operation, profiles[b], tree)) for b in best]
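The selection step above keeps the five profiles with the highest predicted performance before re-benchmarking them; a tiny worked example with made-up numbers:

    import numpy as np
    predperf = np.array([3.1, 7.4, 0.9, 5.2, 6.6, 1.8])   # illustrative predicted perf per profile
    best = (-predperf).argsort()[:5]                       # indices sorted by descending prediction
    print(best)                                            # [1 4 3 0 5]; each is then re-benchmarked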
@@ -130,7 +130,7 @@ def tune(device, operation, json_path):
         json_data[operation_name]['float32'] = {}
         D = json_data[operation_name]['float32']
         if len(profiles) > 1:
-            clf, nrmse = profile.train(X, Y, profiles)
+            clf, nrmse = model.train(X, Y, profiles)
             D['predictor'] = [{'children_left': e.tree_.children_left.tolist(),
                                'children_right': e.tree_.children_right.tolist(),
                                'threshold': e.tree_.threshold.astype('float64').tolist(),
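The per-tree arrays serialized above (children_left, children_right, threshold, plus whatever fields are cut off in this hunk) are enough to evaluate a tree at runtime without sklearn; a hedged sketch of such a walk, where the 'feature' and 'value' key names are assumptions about the elided fields:

    def predict_tree(tree, x):
        # tree: one entry of D['predictor']; 'feature' and 'value' are assumed key names.
        node = 0
        while tree['children_left'][node] != -1:          # -1 marks a leaf in sklearn's tree layout
            if x[tree['feature'][node]] <= tree['threshold'][node]:
                node = tree['children_left'][node]
            else:
                node = tree['children_right'][node]
        return tree['value'][node]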