Tuner: Removed Sklearn + scipy dependency
BIN  python/isaac/external/__init__.pyc (vendored)  Binary file not shown.
BIN  python/isaac/external/tree.pyc (vendored)  Binary file not shown.
@@ -112,7 +112,8 @@ def main():
     library_dirs = [config['lib'] for config in [opencl_config, cuda_config] if config is not None]
 
     #Include directories
-    include =' src/include src/lib/external'.split() + ['external/boost/', 'external/boost/boost/', os.path.join(find_module("numpy")[1], "core", "include")]
+    numpy_include = os.path.join(find_module("numpy")[1], "core", "include")
+    include =' src/include src/lib/external'.split() + ['external/boost/', 'external/boost/boost/', numpy_include]
 
     #Source files
     src = 'src/lib/symbolic/preset.cpp src/lib/symbolic/execute.cpp src/lib/symbolic/io.cpp src/lib/symbolic/expression.cpp src/lib/array.cpp src/lib/value_scalar.cpp src/lib/driver/backend.cpp src/lib/driver/device.cpp src/lib/driver/kernel.cpp src/lib/driver/buffer.cpp src/lib/driver/platform.cpp src/lib/driver/check.cpp src/lib/driver/program.cpp src/lib/driver/command_queue.cpp src/lib/driver/program_cache.cpp src/lib/driver/context.cpp src/lib/driver/event.cpp src/lib/driver/ndrange.cpp src/lib/driver/handle.cpp src/lib/exception/unknown_datatype.cpp src/lib/exception/operation_not_supported.cpp src/lib/profiles/presets.cpp src/lib/profiles/profiles.cpp src/lib/profiles/predictors/random_forest.cpp src/lib/kernels/templates/gemv.cpp src/lib/kernels/templates/axpy.cpp src/lib/kernels/templates/gemm.cpp src/lib/kernels/templates/ger.cpp src/lib/kernels/templates/dot.cpp src/lib/kernels/templates/base.cpp src/lib/kernels/mapped_object.cpp src/lib/kernels/stream.cpp src/lib/kernels/parse.cpp src/lib/kernels/keywords.cpp src/lib/kernels/binder.cpp src/lib/wrap/clBLAS.cpp '.split() + [os.path.join('src', 'bind', sf) for sf in ['_isaac.cpp', 'core.cpp', 'driver.cpp', 'kernels.cpp', 'exceptions.cpp']]
@@ -129,6 +130,25 @@ def main():
     src= [f for f in src if not f.endswith("once_atomic.cpp")]
 
     #Setup
+    extensions = []
+
+    #ISAAC
+    extensions += [Extension(
+                    '_isaac',src,
+                    extra_compile_args= backend_defines + ['-std=c++11', '-Wno-unused-function', '-Wno-unused-local-typedefs', '-Wno-sign-compare', '-Wno-attributes', '-DBOOST_PYTHON_SOURCE '],
+                    extra_link_args=['-Wl,-soname=_isaac.so'],
+                    undef_macros=[],
+                    include_dirs=include,
+                    library_dirs=library_dirs,
+                    libraries=libraries
+                    )]
+
+    #External
+    extensions += [Extension('external._tree',
+                    ['isaac/external/_tree.c'],
+                    extra_compile_args = ["-O3", "-Wall"],
+                    include_dirs = ['.', numpy_include])]
+
     setup(
             name='isaac',
             version='1.0',
@@ -136,17 +156,9 @@ def main():
             author='Philippe Tillet',
             author_email='ptillet@g.harvard.edu',
             license='MPL 2.0',
-            packages=["isaac"],
+            packages=["isaac", "isaac.external"],
             ext_package="isaac",
-            ext_modules=[Extension(
-                '_isaac',src,
-                extra_compile_args= backend_defines + ['-std=c++11', '-Wno-unused-function', '-Wno-unused-local-typedefs', '-Wno-sign-compare', '-Wno-attributes', '-DBOOST_PYTHON_SOURCE '],
-                extra_link_args=['-Wl,-soname=_isaac.so'],
-                undef_macros=[],
-                include_dirs=include,
-                library_dirs=library_dirs,
-                libraries=libraries
-            )],
+            ext_modules=extensions,
             cmdclass={'build_py': build_py, 'build_ext': build_ext_subclass},
             classifiers=[
                             'Environment :: Console',
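Note: with ext_package="isaac", the two Extension objects above should end up importable as isaac._isaac and isaac.external._tree, and the new "isaac.external" package carries the vendored pure-Python forest code. A minimal sanity check after a standard in-place build (the check itself is illustrative and not part of the commit):

# e.g. build first with: python setup.py build_ext --inplace
import importlib

for name in ('isaac._isaac', 'isaac.external._tree', 'isaac.external.forest'):
    importlib.import_module(name)   # raises ImportError if an extension or package was not built/packaged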
@@ -1,4 +1,4 @@
-from sklearn import ensemble
+from isaac.external.forest import RandomForestRegressor
 import numpy as np
 
 def gmean(a, axis=0, dtype=None):
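The local gmean defined here keeps the scipy.stats.gmean signature while avoiding the scipy import the commit removes. A minimal NumPy-only sketch of such a function, purely illustrative (the body actually in the file may differ):

import numpy as np

def gmean(a, axis=0, dtype=None):
    # geometric mean computed as exp(mean(log(a))), matching scipy.stats.gmean's interface
    log_a = np.log(np.asarray(a, dtype=dtype))
    return np.exp(log_a.mean(axis=axis))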
@@ -39,7 +39,7 @@ def train(X, Y, profiles):
     nrmses = {}
     for N in range(1,min(M+1,20)):
         for depth in range(1,min(M+1,20)):
-            clf = ensemble.RandomForestRegressor(N, max_depth=depth).fit(XTr, YTr)
+            clf = RandomForestRegressor(N, max_depth=depth).fit(XTr, YTr)
             t = np.argmax(clf.predict(XCv), axis = 1)
             y = np.array([YCv[i,t[i]] for i in range(t.size)])
             ground = np.max(YCv[:,:], axis=1)
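For context, this grid search scores each (N, max_depth) pair by how close the performance of the profile the forest picks (y) comes to the best achievable performance on the cross-validation set (ground). The exact normalization is outside this hunk; a plausible sketch of such a score, under that assumption only:

import numpy as np

def nrmse(y, ground):
    # RMSE between chosen-profile performance and best achievable performance,
    # normalized by the spread of the best performance (assumed convention, not shown in the diff)
    rmse = np.sqrt(np.mean((ground - y) ** 2))
    return rmse / (ground.max() - ground.min() + 1e-12)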
@@ -3,8 +3,8 @@ from math import log, isinf
 from itertools import chain, product
 from numpy import argsort, argmax
 from operator import mul
-from sklearn import ensemble
 import isaac as sc
+from isaac.external.forest import RandomForestRegressor
 import optimize, tools, model
 
 from json import encoder
@@ -82,8 +82,8 @@ def tune(device, operation, json_path):
         if nparams==1:
             predicted = profiles[0]
         else:
-            clf = ensemble.RandomForestRegressor(min(10, idx+1), max_depth=min(10, idx+1)).fit(X, Y)
-            #clf, nrmse = profile.train(X, Y, profiles)
+            clf = RandomForestRegressor(min(10, idx+1), max_depth=min(10, idx+1)).fit(X, Y)
+            #clf, nrmse = model.train(X, Y, profiles)
             predperf = clf.predict(x)[0]
             best = (-predperf).argsort()[:5]
             perf = [performance(x, tools.benchmark(operation, profiles[b], tree)) for b in best]
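What makes the vendored forest a drop-in here is that the tuner only touches a small slice of sklearn's estimator API: a constructor taking (n_estimators, max_depth), a fit that returns the estimator, and a predict that returns per-profile scores. A sketch of that minimal surface; everything except the fit/predict names is an assumption for illustration, not code from this commit:

class MinimalForestInterface(object):
    # Illustrative stub only: the real isaac.external.forest.RandomForestRegressor
    # is the vendored implementation; this just documents what tune() relies on.
    def __init__(self, n_estimators, max_depth=None):
        self.n_estimators = n_estimators
        self.max_depth = max_depth

    def fit(self, X, Y):
        # must return self so RandomForestRegressor(...).fit(X, Y).predict(...) chains as above
        return self

    def predict(self, X):
        # must return an array of shape (n_samples, n_profiles) so that
        # clf.predict(x)[0] and (-predperf).argsort()[:5] behave as in the diff
        raise NotImplementedError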
@@ -130,7 +130,7 @@ def tune(device, operation, json_path):
     json_data[operation_name]['float32'] = {}
     D = json_data[operation_name]['float32']
     if len(profiles) > 1:
-        clf, nrmse = profile.train(X, Y, profiles)
+        clf, nrmse = model.train(X, Y, profiles)
         D['predictor'] = [{'children_left': e.tree_.children_left.tolist(),
                            'children_right': e.tree_.children_right.tolist(),
                            'threshold': e.tree_.threshold.astype('float64').tolist(),
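The predictor is exported as flat per-node arrays so the C++ side (src/lib/profiles/predictors/random_forest.cpp, listed in the sources above) can rebuild the forest without Python or sklearn. In the sklearn-style layout, children_left[i] == -1 marks a leaf. A sketch of how one exported tree could be evaluated; the 'feature' and 'value' arrays are assumed to be part of the (truncated) export rather than shown in this hunk:

def predict_tree(node_arrays, x):
    # node_arrays: one dict from D['predictor'], e.g.
    # {'children_left': [...], 'children_right': [...], 'threshold': [...], 'feature': [...], 'value': [...]}
    i = 0
    while node_arrays['children_left'][i] != -1:              # -1 marks a leaf in sklearn-style trees
        if x[node_arrays['feature'][i]] <= node_arrays['threshold'][i]:
            i = node_arrays['children_left'][i]
        else:
            i = node_arrays['children_right'][i]
    return node_arrays['value'][i]                            # per-profile scores stored at the leaf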