# Scalable, Portable and Distributed Gradient Boosting
## How to install

### FPGA Platforms

Get the available accelerators for your platform:

- xilinx dynamic-shell
## Examples

### Get Started with XGBoost

This is a quick-start tutorial that lets you try out XGBoost on a demo dataset (SVHN from OpenML) for a classification task.
```python
import xgboost as xgb
from sklearn.datasets import fetch_openml
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelEncoder, Normalizer
from timeit import default_timer as timestamp

# Download the SVHN dataset from OpenML.
X, y = fetch_openml('SVHN', return_X_y=True)

# Hold out 35% of the samples for testing.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.35)

# Scale each sample to unit norm.
features = Normalizer()
X_train = features.fit_transform(X_train)
X_test = features.transform(X_test)

# Encode the class labels as integers in [0, num_class).
label = LabelEncoder()
y_train = label.fit_transform(y_train)
y_test = label.transform(y_test)

params = {
    'alpha': 0.0,                      # L1 regularization term on weights
    'eta': 0.3,                        # learning rate
    'max_depth': 10,                   # maximum tree depth
    'num_class': len(label.classes_),  # number of target classes
    'objective': 'multi:softmax',      # multiclass classification, predicts class indices
    'subsample': 1.0,                  # fraction of samples used per tree
    'tree_method': 'fpga_exact'        # FPGA-accelerated exact tree construction
}

# Wrap the data in XGBoost's optimized DMatrix format.
dtrain = xgb.DMatrix(X_train, y_train)
dtest = xgb.DMatrix(X_test, y_test)

# Train for 10 boosting rounds and time the run.
begin = timestamp()
model = xgb.train(params, dtrain, 10)
end = timestamp()
print('time=%.3f' % (end - begin))

# Evaluate on the held-out test set.
predictions = model.predict(dtest)
print('accuracy=%.3f' % accuracy_score(y_test, predictions))
```
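
Because `multi:softmax` returns encoded class indices, you may want to map predictions back to the original label names and persist the trained booster for later use. Below is a minimal sketch using the standard scikit-learn and XGBoost APIs; the `svhn-model.json` filename is just an illustrative choice:

```python
# Map encoded class indices back to the original SVHN label names.
predicted_labels = label.inverse_transform(predictions.astype(int))
print(predicted_labels[:10])

# Persist the trained booster (filename is an arbitrary example).
model.save_model('svhn-model.json')

# Reload it later and verify it produces the same test accuracy.
restored = xgb.Booster()
restored.load_model('svhn-model.json')
print('accuracy=%.3f' % accuracy_score(y_test, restored.predict(dtest)))
```

The saved model is independent of the tree method used for training, so a booster trained with `fpga_exact` can be reloaded and used for prediction on a machine without an FPGA.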