xgb.Booster objects are created by the xgboost package, which provides
efficient and scalable implementations of gradient boosted decision trees.
Because post-processing functions in xgboost, such as xgb.Booster.complete,
dispatch on the first class listed in the model object, the
butcher_xgb.Booster class is not appended.
Examples
library(xgboost)
library(parsnip)
data(agaricus.train)
if (utils::packageVersion("xgboost") > "2.0.0.0") {
  bst <- xgboost(x = agaricus.train$data,
                 y = as.factor(agaricus.train$label),
                 learning_rate = 1,
                 nthread = 2,
                 nrounds = 2,
                 eval_metric = "logloss",
                 objective = "binary:logistic")
} else {
  bst <- xgboost(data = agaricus.train$data,
                 label = agaricus.train$label,
                 eta = 1,
                 nthread = 2,
                 nrounds = 2,
                 eval_metric = "logloss",
                 objective = "binary:logistic",
                 verbose = 0)
}
out <- butcher(bst, verbose = TRUE)
#> ✖ No memory released. Do not butcher.
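As noted in the description, butchering does not prepend a
butcher_xgb.Booster class, so the class attribute of the returned object
should be unchanged. A quick, hedged check reusing the bst and out objects
created above:

# Compare class attributes before and after butchering; they are expected
# to match because the butcher_xgb.Booster class is deliberately not appended.
identical(class(bst), class(out))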
# Another xgboost model
fit <- boost_tree(mode = "classification", trees = 20) |>
set_engine("xgboost", eval_metric = "mlogloss") |>
fit(Species ~ ., data = iris)
out <- butcher(fit, verbose = TRUE)
#> ✖ The butchered object is 456 B larger than the original. Do not butcher.
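To see where the memory in a fitted model lives, and why butchering frees
so little here, the weigh() function from butcher lists model components by
size. This is a sketch reusing the fit and out objects from above; lobstr is
assumed to be available for the size comparison, and the exact numbers will
vary by xgboost version:

# Rank the components of the parsnip/xgboost fit by in-memory size.
weigh(fit)
# Compare total sizes of the original and butchered fits.
lobstr::obj_size(fit)
lobstr::obj_size(out)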
