I work a bit differently with xgb. The following code will give you some control over your hyperparameters, and I would be able to assist you if things don't work.
import xgboost as xgb

# Wrap the numpy arrays in DMatrix, XGBoost's optimized internal data format.
# NOTE(review): assumes X_train_np / y_train_np / X_test_np / y_test_np are
# defined earlier in the file — confirm they exist before this point.
dtrain = xgb.DMatrix(X_train_np, label=y_train_np)
dtest = xgb.DMatrix(X_test_np, label=y_test_np)

# Booster hyperparameters. eval_metric='auc' makes training report ROC-AUC
# for every dataset in `watchlist` on each boosting round.
# A list of (key, value) pairs is used instead of a dict so the same key
# (e.g. eval_metric) could be repeated if more metrics are wanted later.
param0 = [
    ('max_depth', 4),                 # cap tree depth to limit overfitting
    ('eta', 0.1),                     # learning rate (shrinkage per round)
    ('objective', 'binary:logistic'), # binary classification, probability output
    ('min_child_weight', 4),          # min sum of instance weight per leaf
    ('silent', 1),                    # NOTE: removed in xgboost >= 1.0 — use ('verbosity', 0) there
    ('eval_metric', 'auc'),
    ('subsample', 0.75),              # row sampling ratio per tree
    ('colsample_bytree', 0.75),       # column sampling ratio per tree
    ('gamma', 1),                     # min loss reduction required to split
]

# Datasets evaluated (and printed) after every boosting round, in this order.
watchlist = [(dtrain, "trn"), (dtest, "tst")]
n_estimators = 100  # number of boosting rounds

# xgb.train is the low-level equivalent of the sklearn wrapper's .fit()
model = xgb.train(param0, dtrain, n_estimators, evals=watchlist)