import numpy as np
from xgboost import XGBClassifier
from sklearn.metrics import classification_report, accuracy_score

# "loss": "deviance" is a scikit-learn GradientBoostingClassifier parameter,
# not an XGBoost one; the equivalent XGBoost knob is "objective", which for
# a multiclass problem is "multi:softprob".
params = {"objective": "multi:softprob",
          "max_depth": 10,
          "n_estimators": 100}
xgb_clf = XGBClassifier(**params)
xgb_clf.fit(train_data, train_label)
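The choice of max_depth=10 and n_estimators=100 is a starting point rather than a tuned result. One quick way to sanity-check it is k-fold cross-validation on the training set; the sketch below uses scikit-learn's cross_val_score with a 3-fold split, both of which are illustrative choices and not part of the pipeline above.

from sklearn.model_selection import cross_val_score

# Illustrative sanity check: mean 3-fold accuracy for the chosen settings.
cv_scores = cross_val_score(XGBClassifier(**params), train_data, train_label, cv=3)
print(cv_scores.mean(), cv_scores.std())

Comparing this mean against runs with, say, max_depth=5 or max_depth=15 shows whether the deeper trees actually pay off.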
# Inspect the full set of hyperparameters the fitted classifier is using.
xgb_clf.get_params()

{'base_score': 0.5,
 'booster': 'gbtree',
 'colsample_bylevel': 1,
 'colsample_bytree': 1,
 'gamma': 0,
 'learning_rate': 0.1,
 'max_delta_step': 0,
 'max_depth': 10,
 'min_child_weight': 1,
 'missing': None,
 'n_estimators': 100,
 'n_jobs': 1,
 'nthread': None,
 'objective': 'multi:softprob',
 'random_state': 0,
 'reg_alpha': 0,
 'reg_lambda': 1,
 'scale_pos_weight': 1,
 'seed': None,
 'silent': True,
 'subsample': 1}

Everything we did not set explicitly is still at its default. The silent, nthread, and seed keys date from the older XGBoost scikit-learn API; recent releases replace them with verbosity, n_jobs, and random_state.
# Predict on the test set and peek at the first few predicted labels.
y_pred_xgb = xgb_clf.predict(test_data)
y_pred_xgb[:5]
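With the objective set to multi:softprob, the booster produces a full probability distribution over the classes and predict simply returns the most likely one. The probabilities themselves are available through predict_proba; the snippet below uses only variables already defined above.

# Per-class probabilities for the first test sample;
# predict() returns the argmax of each row.
proba = xgb_clf.predict_proba(test_data[:1])
print(proba.shape)           # (1, number_of_classes)
print(proba.argmax(axis=1))  # matches y_pred_xgb[0]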
# Indices of correctly and incorrectly classified test samples.
correct = np.nonzero(test_label == y_pred_xgb)[0]
wrong = np.nonzero(test_label != y_pred_xgb)[0]
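The wrong index array makes it easy to eyeball what the model gets confused by. The sketch below assumes each row of test_data is a flattened 28x28 grayscale image, which is an assumption about the dataset rather than something established in this section; adjust the reshape to your actual input shape.

import matplotlib.pyplot as plt

# Assumption: rows of test_data are flattened 28x28 grayscale images.
for i, idx in enumerate(wrong[:4]):
    plt.subplot(1, 4, i + 1)
    plt.imshow(test_data[idx].reshape(28, 28), cmap="gray")
    plt.title(f"pred {y_pred_xgb[idx]} / true {test_label[idx]}")
    plt.axis("off")
plt.show()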
print(classification_report(test_label, y_pred_xgb, target_names=label_names))
print(accuracy_score(test_label, y_pred_xgb))

With XGBoost, the model reaches an accuracy of 90% on the test set.
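A single accuracy figure hides which classes are being mistaken for which. A confusion matrix, built from the same variables used above, makes that visible:

from sklearn.metrics import confusion_matrix

# Rows are true classes, columns are predicted classes;
# off-diagonal counts are the misclassifications.
cm = confusion_matrix(test_label, y_pred_xgb)
print(cm)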