[R-package] [docs] Simplified examples to cut example run time (fixes #2988) (#2989)

* [R-package] [docs] Simplified examples to cut example run time (fixes #2988)

* updated learning rates
James Lamb 2020-04-15 01:17:56 +01:00 committed by GitHub
Parent 151bf0703a
Commit 181616748b
No key found matching this signature
GPG key ID: 4AEE18F83AFDEB23
46 changed files with 132 additions and 124 deletions
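Most hunks below apply the same pattern: drop the redundant library(lightgbm) call from each example, cut nrounds (typically from 10L down to 5L or 3L), raise the slow 0.01 learning rates to 0.1, and wrap file-writing or slower examples in \donttest{}. A rough composite of the post-change example shape, assembled from the diff rather than copied from any single file:

# Illustrative sketch only; exact values vary by help page.
data(agaricus.train, package = "lightgbm")  # library(lightgbm) call dropped
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)
params <- list(objective = "regression", metric = "l2")
model <- lgb.train(
    params = params
    , data = dtrain
    , nrounds = 5L  # was 10L before this commit
    , min_data = 1L
    , learning_rate = 1.0
)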

View file

@@ -711,7 +711,6 @@ Booster <- R6::R6Class(
#' number of columns corresponding to the number of trees.
#'
#' @examples
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -723,11 +722,10 @@ Booster <- R6::R6Class(
#' model <- lgb.train(
#' params = params
#' , data = dtrain
#' , nrounds = 10L
#' , nrounds = 5L
#' , valids = valids
#' , min_data = 1L
#' , learning_rate = 1.0
#' , early_stopping_rounds = 5L
#' )
#' preds <- predict(model, test$data)
#' @export
@@ -769,7 +767,7 @@ predict.lgb.Booster <- function(object,
#' @return lgb.Booster
#'
#' @examples
#' library(lightgbm)
#' \donttest{
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -781,17 +779,17 @@ predict.lgb.Booster <- function(object,
#' model <- lgb.train(
#' params = params
#' , data = dtrain
#' , nrounds = 10L
#' , nrounds = 5L
#' , valids = valids
#' , min_data = 1L
#' , learning_rate = 1.0
#' , early_stopping_rounds = 5L
#' , early_stopping_rounds = 3L
#' )
#' lgb.save(model, "model.txt")
#' load_booster <- lgb.load(filename = "model.txt")
#' model_string <- model$save_model_to_string(NULL) # saves best iteration
#' load_booster_from_str <- lgb.load(model_str = model_string)
#'
#' }
#' @export
lgb.load <- function(filename = NULL, model_str = NULL) {
@@ -828,6 +826,7 @@ lgb.load <- function(filename = NULL, model_str = NULL) {
#' @return lgb.Booster
#'
#' @examples
#' \donttest{
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
@@ -847,6 +846,7 @@ lgb.load <- function(filename = NULL, model_str = NULL) {
#' , early_stopping_rounds = 5L
#' )
#' lgb.save(model, "model.txt")
#' }
#' @export
lgb.save <- function(booster, filename, num_iteration = NULL) {
@@ -874,6 +874,7 @@ lgb.save <- function(booster, filename, num_iteration = NULL) {
#' @return json format of model
#'
#' @examples
#' \donttest{
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
@@ -893,7 +894,7 @@ lgb.save <- function(booster, filename, num_iteration = NULL) {
#' , early_stopping_rounds = 5L
#' )
#' json_model <- lgb.dump(model)
#'
#' }
#' @export
lgb.dump <- function(booster, num_iteration = NULL) {
@@ -922,7 +923,6 @@ lgb.dump <- function(booster, num_iteration = NULL) {
#'
#' @examples
#' # train a regression model
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -934,11 +934,10 @@ lgb.dump <- function(booster, num_iteration = NULL) {
#' model <- lgb.train(
#' params = params
#' , data = dtrain
#' , nrounds = 10L
#' , nrounds = 5L
#' , valids = valids
#' , min_data = 1L
#' , learning_rate = 1.0
#' , early_stopping_rounds = 5L
#' )
#'
#' # Examine valid data_name values

View file

@@ -725,7 +725,6 @@ Dataset <- R6::R6Class(
#' @return constructed dataset
#'
#' @examples
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -770,7 +769,6 @@ lgb.Dataset <- function(data,
#' @return constructed dataset
#'
#' @examples
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -797,7 +795,6 @@ lgb.Dataset.create.valid <- function(dataset, data, info = list(), ...) {
#' @param dataset Object of class \code{lgb.Dataset}
#'
#' @examples
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -828,7 +825,6 @@ lgb.Dataset.construct <- function(dataset) {
#' be directly used with an \code{lgb.Dataset} object.
#'
#' @examples
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -863,7 +859,6 @@ dim.lgb.Dataset <- function(x, ...) {
#' Since row names are irrelevant, it is recommended to use \code{colnames} directly.
#'
#' @examples
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -936,7 +931,6 @@ dimnames.lgb.Dataset <- function(x) {
#' @return constructed sub dataset
#'
#' @examples
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -983,7 +977,6 @@ slice.lgb.Dataset <- function(dataset, idxset, ...) {
#' }
#'
#' @examples
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -1037,7 +1030,6 @@ getinfo.lgb.Dataset <- function(dataset, name, ...) {
#' }
#'
#' @examples
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -1078,7 +1070,6 @@ setinfo.lgb.Dataset <- function(dataset, name, info, ...) {
#' @return passed dataset
#'
#' @examples
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -1109,7 +1100,6 @@ lgb.Dataset.set.categorical <- function(dataset, categorical_feature) {
#' @return passed dataset
#'
#' @examples
#' library(lightgbm)
#' data(agaricus.train, package ="lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -1141,7 +1131,6 @@ lgb.Dataset.set.reference <- function(dataset, reference) {
#' @return passed dataset
#'
#' @examples
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)

View file

@@ -56,7 +56,6 @@ CVBooster <- R6::R6Class(
#' @return a trained model \code{lgb.CVBooster}.
#'
#' @examples
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -64,11 +63,10 @@ CVBooster <- R6::R6Class(
#' model <- lgb.cv(
#' params = params
#' , data = dtrain
#' , nrounds = 10L
#' , nrounds = 5L
#' , nfold = 3L
#' , min_data = 1L
#' , learning_rate = 1.0
#' , early_stopping_rounds = 5L
#' )
#' @importFrom data.table data.table setorderv
#' @export

View file

@@ -13,20 +13,22 @@
#' }
#'
#' @examples
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
#'
#' params <- list(
#' objective = "binary"
#' , learning_rate = 0.01
#' , num_leaves = 63L
#' , learning_rate = 0.1
#' , max_depth = -1L
#' , min_data_in_leaf = 1L
#' , min_sum_hessian_in_leaf = 1.0
#' )
#' model <- lgb.train(params, dtrain, 10L)
#' model <- lgb.train(
#' params = params
#' , data = dtrain
#' , nrounds = 5L
#' )
#'
#' tree_imp1 <- lgb.importance(model, percentage = TRUE)
#' tree_imp2 <- lgb.importance(model, percentage = FALSE)

View file

@@ -16,7 +16,6 @@
#' Contribution columns to each class.
#'
#' @examples
#' Sigmoid <- function(x) 1.0 / (1.0 + exp(-x))
#' Logit <- function(x) log(x / (1.0 - x))
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
@@ -27,13 +26,16 @@
#'
#' params <- list(
#' objective = "binary"
#' , learning_rate = 0.01
#' , num_leaves = 63L
#' , learning_rate = 0.1
#' , max_depth = -1L
#' , min_data_in_leaf = 1L
#' , min_sum_hessian_in_leaf = 1.0
#' )
#' model <- lgb.train(params, dtrain, 10L)
#' model <- lgb.train(
#' params = params
#' , data = dtrain
#' , nrounds = 3L
#' )
#'
#' tree_interpretation <- lgb.interprete(model, test$data, 1L:5L)
#'

View file

@@ -24,17 +24,19 @@
#'
#' params <- list(
#' objective = "binary"
#' , learning_rate = 0.01
#' , num_leaves = 63L
#' , max_depth = -1L
#' , learning_rate = 0.1
#' , min_data_in_leaf = 1L
#' , min_sum_hessian_in_leaf = 1.0
#' )
#'
#' model <- lgb.train(params, dtrain, 10L)
#' model <- lgb.train(
#' params = params
#' , data = dtrain
#' , nrounds = 5L
#' )
#'
#' tree_imp <- lgb.importance(model, percentage = TRUE)
#' lgb.plot.importance(tree_imp, top_n = 10L, measure = "Gain")
#' lgb.plot.importance(tree_imp, top_n = 5L, measure = "Gain")
#' @importFrom graphics barplot par
#' @export
lgb.plot.importance <- function(tree_imp,

View file

@@ -15,28 +15,43 @@
#' The \code{lgb.plot.interpretation} function creates a \code{barplot}.
#'
#' @examples
#' library(lightgbm)
#' Sigmoid <- function(x) {1.0 / (1.0 + exp(-x))}
#' Logit <- function(x) {log(x / (1.0 - x))}
#' \donttest{
#' Logit <- function(x) {
#' log(x / (1.0 - x))
#' }
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
#' setinfo(dtrain, "init_score", rep(Logit(mean(train$label)), length(train$label)))
#' labels <- agaricus.train$label
#' dtrain <- lgb.Dataset(
#' agaricus.train$data
#' , label = labels
#' )
#' setinfo(dtrain, "init_score", rep(Logit(mean(labels)), length(labels)))
#'
#' data(agaricus.test, package = "lightgbm")
#' test <- agaricus.test
#'
#' params <- list(
#' objective = "binary"
#' , learning_rate = 0.01
#' , num_leaves = 63L
#' , learning_rate = 0.1
#' , max_depth = -1L
#' , min_data_in_leaf = 1L
#' , min_sum_hessian_in_leaf = 1.0
#' )
#' model <- lgb.train(params, dtrain, 10L)
#' model <- lgb.train(
#' params = params
#' , data = dtrain
#' , nrounds = 5L
#' )
#'
#' tree_interpretation <- lgb.interprete(model, test$data, 1L:5L)
#' lgb.plot.interpretation(tree_interpretation[[1L]], top_n = 10L)
#' tree_interpretation <- lgb.interprete(
#' model = model
#' , data = agaricus.test$data
#' , idxset = 1L:5L
#' )
#' lgb.plot.interpretation(
#' tree_interpretation_dt = tree_interpretation[[1L]]
#' , top_n = 5L
#' )
#' }
#' @importFrom data.table setnames
#' @importFrom graphics barplot par
#' @export

View file

@@ -8,7 +8,6 @@
#' for input in \code{lgb.Dataset}.
#'
#' @examples
#' library(lightgbm)
#' data(iris)
#'
#' str(iris)

View file

@@ -11,7 +11,6 @@
#' for input in \code{lgb.Dataset}.
#'
#' @examples
#' library(lightgbm)
#' data(iris)
#'
#' str(iris)

View file

@@ -10,7 +10,6 @@
#' in \code{lgb.Dataset}.
#'
#' @examples
#' library(lightgbm)
#' data(iris)
#'
#' str(iris)

View file

@@ -13,7 +13,6 @@
#' \code{lgb.Dataset}.
#'
#' @examples
#' library(lightgbm)
#' data(iris)
#'
#' str(iris)

View file

@@ -29,7 +29,6 @@
#' @return a trained booster model \code{lgb.Booster}.
#'
#' @examples
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -41,11 +40,11 @@
#' model <- lgb.train(
#' params = params
#' , data = dtrain
#' , nrounds = 10L
#' , nrounds = 5L
#' , valids = valids
#' , min_data = 1L
#' , learning_rate = 1.0
#' , early_stopping_rounds = 5L
#' , early_stopping_rounds = 3L
#' )
#' @export
lgb.train <- function(params = list(),

View file

@@ -14,7 +14,6 @@
#' @return NULL invisibly.
#'
#' @examples
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -26,11 +25,10 @@
#' model <- lgb.train(
#' params = params
#' , data = dtrain
#' , nrounds = 10L
#' , nrounds = 5L
#' , valids = valids
#' , min_data = 1L
#' , learning_rate = 1.0
#' , early_stopping_rounds = 5L
#' )
#'
#' \dontrun{

View file

@@ -7,6 +7,7 @@
#' @return \code{lgb.Booster}.
#'
#' @examples
#' \donttest{
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
@@ -27,7 +28,7 @@
#' )
#' saveRDS.lgb.Booster(model, "model.rds")
#' new_model <- readRDS.lgb.Booster("model.rds")
#'
#' }
#' @export
readRDS.lgb.Booster <- function(file = "", refhook = NULL) {

View file

@@ -18,6 +18,7 @@
#' @return NULL invisibly.
#'
#' @examples
#' \donttest{
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
@@ -37,6 +38,7 @@
#' , early_stopping_rounds = 5L
#' )
#' saveRDS.lgb.Booster(model, "model.rds")
#' }
#' @export
saveRDS.lgb.Booster <- function(object,
file = "",

View file

@@ -22,7 +22,6 @@ Note: since \code{nrow} and \code{ncol} internally use \code{dim}, they can also
be directly used with an \code{lgb.Dataset} object.
}
\examples{
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)

View file

@@ -24,7 +24,6 @@ Generic \code{dimnames} methods are used by \code{colnames}.
Since row names are irrelevant, it is recommended to use \code{colnames} directly.
}
\examples{
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)

View file

@@ -33,7 +33,6 @@ The \code{name} field can be one of the following:
}
}
\examples{
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)

View file

@@ -40,7 +40,6 @@ Construct \code{lgb.Dataset} object from dense matrix, sparse matrix
or local file (that was created previously by saving an \code{lgb.Dataset}).
}
\examples{
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)

View file

@@ -13,7 +13,6 @@ lgb.Dataset.construct(dataset)
Construct Dataset explicitly
}
\examples{
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)

View file

@@ -22,7 +22,6 @@ constructed dataset
Construct validation data according to training data
}
\examples{
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)

View file

@@ -19,7 +19,6 @@ Please note that \code{init_score} is not saved in binary file.
If you need it, please set it again after loading Dataset.
}
\examples{
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)

View file

@@ -21,7 +21,6 @@ Set the categorical features of an \code{lgb.Dataset} object. Use this function
to tell LightGBM which features should be treated as categorical.
}
\examples{
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)

View file

@@ -18,7 +18,6 @@ passed dataset
If you want to use validation data, you should set reference to training data
}
\examples{
library(lightgbm)
data(agaricus.train, package ="lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)

View file

@@ -100,7 +100,6 @@ a trained model \code{lgb.CVBooster}.
Cross validation logic used by LightGBM
}
\examples{
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -108,10 +107,9 @@ params <- list(objective = "regression", metric = "l2")
model <- lgb.cv(
params = params
, data = dtrain
, nrounds = 10L
, nrounds = 5L
, nfold = 3L
, min_data = 1L
, learning_rate = 1.0
, early_stopping_rounds = 5L
)
}

View file

@@ -18,6 +18,7 @@ json format of model
Dump LightGBM model to json
}
\examples{
\donttest{
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
@@ -37,5 +38,5 @@ model <- lgb.train(
, early_stopping_rounds = 5L
)
json_model <- lgb.dump(model)
}
}

View file

@@ -33,7 +33,6 @@ Given a \code{lgb.Booster}, return evaluation results for a
}
\examples{
# train a regression model
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -45,11 +44,10 @@ valids <- list(test = dtest)
model <- lgb.train(
params = params
, data = dtrain
, nrounds = 10L
, nrounds = 5L
, valids = valids
, min_data = 1L
, learning_rate = 1.0
, early_stopping_rounds = 5L
)
# Examine valid data_name values

View file

@@ -24,20 +24,22 @@ For a tree model, a \code{data.table} with the following columns:
Creates a \code{data.table} of feature importances in a model.
}
\examples{
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)
params <- list(
objective = "binary"
, learning_rate = 0.01
, num_leaves = 63L
, learning_rate = 0.1
, max_depth = -1L
, min_data_in_leaf = 1L
, min_sum_hessian_in_leaf = 1.0
)
model <- lgb.train(params, dtrain, 10L)
model <- lgb.train(
params = params
, data = dtrain
, nrounds = 5L
)
tree_imp1 <- lgb.importance(model, percentage = TRUE)
tree_imp2 <- lgb.importance(model, percentage = FALSE)

View file

@@ -29,7 +29,6 @@ For regression, binary classification and lambdarank model, a \code{list} of \co
Computes feature contribution components of rawscore prediction.
}
\examples{
Sigmoid <- function(x) 1.0 / (1.0 + exp(-x))
Logit <- function(x) log(x / (1.0 - x))
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
@@ -40,13 +39,16 @@ test <- agaricus.test
params <- list(
objective = "binary"
, learning_rate = 0.01
, num_leaves = 63L
, learning_rate = 0.1
, max_depth = -1L
, min_data_in_leaf = 1L
, min_sum_hessian_in_leaf = 1.0
)
model <- lgb.train(params, dtrain, 10L)
model <- lgb.train(
params = params
, data = dtrain
, nrounds = 3L
)
tree_interpretation <- lgb.interprete(model, test$data, 1L:5L)

View file

@@ -19,7 +19,7 @@ Load LightGBM takes in either a file path or model string.
If both are provided, Load will default to loading from file
}
\examples{
library(lightgbm)
\donttest{
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -31,15 +31,15 @@ valids <- list(test = dtest)
model <- lgb.train(
params = params
, data = dtrain
, nrounds = 10L
, nrounds = 5L
, valids = valids
, min_data = 1L
, learning_rate = 1.0
, early_stopping_rounds = 5L
, early_stopping_rounds = 3L
)
lgb.save(model, "model.txt")
load_booster <- lgb.load(filename = "model.txt")
model_string <- model$save_model_to_string(NULL) # saves best iteration
load_booster_from_str <- lgb.load(model_str = model_string)
}
}

View file

@@ -43,15 +43,17 @@ dtrain <- lgb.Dataset(train$data, label = train$label)
params <- list(
objective = "binary"
, learning_rate = 0.01
, num_leaves = 63L
, max_depth = -1L
, learning_rate = 0.1
, min_data_in_leaf = 1L
, min_sum_hessian_in_leaf = 1.0
)
model <- lgb.train(params, dtrain, 10L)
model <- lgb.train(
params = params
, data = dtrain
, nrounds = 5L
)
tree_imp <- lgb.importance(model, percentage = TRUE)
lgb.plot.importance(tree_imp, top_n = 10L, measure = "Gain")
lgb.plot.importance(tree_imp, top_n = 5L, measure = "Gain")
}

View file

@@ -34,26 +34,41 @@ The graph represents each feature as a horizontal bar of length proportional to
contribution of a feature. Features are shown ranked in a decreasing contribution order.
}
\examples{
library(lightgbm)
Sigmoid <- function(x) {1.0 / (1.0 + exp(-x))}
Logit <- function(x) {log(x / (1.0 - x))}
\donttest{
Logit <- function(x) {
log(x / (1.0 - x))
}
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)
setinfo(dtrain, "init_score", rep(Logit(mean(train$label)), length(train$label)))
labels <- agaricus.train$label
dtrain <- lgb.Dataset(
agaricus.train$data
, label = labels
)
setinfo(dtrain, "init_score", rep(Logit(mean(labels)), length(labels)))
data(agaricus.test, package = "lightgbm")
test <- agaricus.test
params <- list(
objective = "binary"
, learning_rate = 0.01
, num_leaves = 63L
, learning_rate = 0.1
, max_depth = -1L
, min_data_in_leaf = 1L
, min_sum_hessian_in_leaf = 1.0
)
model <- lgb.train(params, dtrain, 10L)
model <- lgb.train(
params = params
, data = dtrain
, nrounds = 5L
)
tree_interpretation <- lgb.interprete(model, test$data, 1L:5L)
lgb.plot.interpretation(tree_interpretation[[1L]], top_n = 10L)
tree_interpretation <- lgb.interprete(
model = model
, data = agaricus.test$data
, idxset = 1L:5L
)
lgb.plot.interpretation(
tree_interpretation_dt = tree_interpretation[[1L]]
, top_n = 5L
)
}
}

View file

@@ -19,7 +19,6 @@ Attempts to prepare a clean dataset to prepare to put in a \code{lgb.Dataset}.
\code{\link{lgb.prepare_rules}} if you want to apply this transformation to other datasets.
}
\examples{
library(lightgbm)
data(iris)
str(iris)

View file

@@ -22,7 +22,6 @@ Attempts to prepare a clean dataset to prepare to put in a \code{lgb.Dataset}.
input. Consider this as a half memory technique which is dangerous, especially for LightGBM.
}
\examples{
library(lightgbm)
data(iris)
str(iris)

View file

@@ -22,7 +22,6 @@ Attempts to prepare a clean dataset to prepare to put in a \code{lgb.Dataset}.
so you can convert other datasets using this converter.
}
\examples{
library(lightgbm)
data(iris)
str(iris)

View file

@@ -25,7 +25,6 @@ Attempts to prepare a clean dataset to prepare to put in a \code{lgb.Dataset}.
Consider this as a half memory technique which is dangerous, especially for LightGBM.
}
\examples{
library(lightgbm)
data(iris)
str(iris)

View file

@@ -20,6 +20,7 @@ lgb.Booster
Save LightGBM model
}
\examples{
\donttest{
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
@@ -40,3 +41,4 @@ model <- lgb.train(
)
lgb.save(model, "model.txt")
}
}

View file

@@ -83,7 +83,6 @@ a trained booster model \code{lgb.Booster}.
Logic to train with LightGBM
}
\examples{
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -95,10 +94,10 @@ valids <- list(test = dtest)
model <- lgb.train(
params = params
, data = dtrain
, nrounds = 10L
, nrounds = 5L
, valids = valids
, min_data = 1L
, learning_rate = 1.0
, early_stopping_rounds = 5L
, early_stopping_rounds = 3L
)
}

View file

@@ -26,7 +26,6 @@ Attempts to unload LightGBM packages so you can remove objects cleanly without
apparent reason and you do not want to restart R to fix the lost object.
}
\examples{
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -38,11 +37,10 @@ valids <- list(test = dtest)
model <- lgb.train(
params = params
, data = dtrain
, nrounds = 10L
, nrounds = 5L
, valids = valids
, min_data = 1L
, learning_rate = 1.0
, early_stopping_rounds = 5L
)
\dontrun{

View file

@@ -4,7 +4,8 @@
\alias{lgb_shared_params}
\title{Shared parameter docs}
\arguments{
\item{callbacks}{List of callback functions that are applied at each iteration.}
\item{callbacks}{list of callback functions
List of callback functions that are applied at each iteration.}
\item{data}{a \code{lgb.Dataset} object, used for training. Some functions, such as \code{\link{lgb.cv}},
may allow you to pass other types of data like \code{matrix} and then separately supply

View file

@@ -45,7 +45,8 @@ If early stopping occurs, the model will have 'best_iter' field.}
\item{init_model}{path of model file of \code{lgb.Booster} object, will continue training from this model}
\item{callbacks}{List of callback functions that are applied at each iteration.}
\item{callbacks}{list of callback functions
List of callback functions that are applied at each iteration.}
\item{...}{Additional arguments passed to \code{\link{lgb.train}}. For example
\itemize{

View file

@@ -52,7 +52,6 @@ For regression or binary classification, it returns a vector of length \code{nro
Predicted values based on class \code{lgb.Booster}
}
\examples{
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -64,11 +63,10 @@ valids <- list(test = dtest)
model <- lgb.train(
params = params
, data = dtrain
, nrounds = 10L
, nrounds = 5L
, valids = valids
, min_data = 1L
, learning_rate = 1.0
, early_stopping_rounds = 5L
)
preds <- predict(model, test$data)
}

View file

@@ -18,6 +18,7 @@ readRDS.lgb.Booster(file = "", refhook = NULL)
Attempts to load a model stored in a \code{.rds} file, using \code{\link[base]{readRDS}}
}
\examples{
\donttest{
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
@@ -38,5 +39,5 @@ model <- lgb.train(
)
saveRDS.lgb.Booster(model, "model.rds")
new_model <- readRDS.lgb.Booster("model.rds")
}
}

View file

@@ -42,6 +42,7 @@ Attempts to save a model using RDS. Has an additional parameter (\code{raw})
which decides whether to save the raw model or not.
}
\examples{
\donttest{
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
@@ -62,3 +63,4 @@ model <- lgb.train(
)
saveRDS.lgb.Booster(model, "model.rds")
}
}

View file

@@ -38,7 +38,6 @@ The \code{name} field can be one of the following:
}
}
\examples{
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)

View file

@@ -24,7 +24,6 @@ Get a new \code{lgb.Dataset} containing the specified rows of
original \code{lgb.Dataset} object
}
\examples{
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)