Skip to content

Commit

Permalink
Merge branch 'master' into Rinterface7
Browse files Browse the repository at this point in the history
  • Loading branch information
jameslamb committed Feb 23, 2022
2 parents b97841b + a1fbe84 commit 2e5a749
Show file tree
Hide file tree
Showing 24 changed files with 310 additions and 179 deletions.
2 changes: 1 addition & 1 deletion .appveyor.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ version: 3.3.2.99.{build}
image: Visual Studio 2015
platform: x64
configuration: # a trick to construct a build matrix with multiple Python versions
- 3.7
- '3.7'

# only build pull requests and
# commits to 'master'
Expand Down
11 changes: 9 additions & 2 deletions .ci/test.sh
Original file line number Diff line number Diff line change
Expand Up @@ -36,8 +36,15 @@ cd $BUILD_DIRECTORY

if [[ $TASK == "check-docs" ]] || [[ $TASK == "check-links" ]]; then
cd $BUILD_DIRECTORY/docs
conda install -q -y -n $CONDA_ENV -c conda-forge doxygen rstcheck
pip install --user -r requirements.txt
conda env update \
-n $CONDA_ENV \
--file ./env.yml || exit -1
conda install \
-q \
-y \
-n $CONDA_ENV \
doxygen \
rstcheck || exit -1
# check reStructuredText formatting
cd $BUILD_DIRECTORY/python-package
rstcheck --report warning $(find . -type f -name "*.rst") || exit -1
Expand Down
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -346,6 +346,7 @@ instance/
# Sphinx documentation
docs/_build/
docs/pythonapi/
*.flag

# Doxygen documentation
docs/doxyoutput/
Expand Down
10 changes: 6 additions & 4 deletions .readthedocs.yaml
Original file line number Diff line number Diff line change
@@ -1,10 +1,12 @@
version: 2
build:
os: "ubuntu-20.04"
tools:
python: "miniconda3-4.7"
conda:
environment: docs/env.yml
formats:
- pdf
python:
version: 3
install:
- requirements: docs/requirements.txt
sphinx:
builder: html
configuration: docs/conf.py
Expand Down
3 changes: 2 additions & 1 deletion .vsts-ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@ jobs:
PYTHON_VERSION: '3.7'
bdist:
TASK: bdist
PYTHON_VERSION: '3.8'
inference:
TASK: if-else
mpi_source:
Expand All @@ -52,7 +53,7 @@ jobs:
METHOD: source
# on Ubuntu 14.04, gpu_source build segfaults with newer version of Python
# (and newer version of scipy as a consequence)
PYTHON_VERSION: 3.7
PYTHON_VERSION: '3.7'
swig:
TASK: swig
steps:
Expand Down
140 changes: 61 additions & 79 deletions R-package/R/lgb.Booster.R
Original file line number Diff line number Diff line change
Expand Up @@ -26,108 +26,90 @@ Booster <- R6::R6Class(
modelfile = NULL,
model_str = NULL) {

# Create parameters and handle
handle <- NULL

# Attempts to create a handle for the dataset
try({

# Check if training dataset is not null
if (!is.null(train_set)) {
# Check if training dataset is lgb.Dataset or not
if (!lgb.is.Dataset(train_set)) {
stop("lgb.Booster: Can only use lgb.Dataset as training data")
}
train_set_handle <- train_set$.__enclos_env__$private$get_handle()
params <- utils::modifyList(params, train_set$get_params())
params_str <- lgb.params2str(params = params)
# Store booster handle
handle <- .Call(
LGBM_BoosterCreate_R
, train_set_handle
, params_str
)

# Create private booster information
private$train_set <- train_set
private$train_set_version <- train_set$.__enclos_env__$private$version
private$num_dataset <- 1L
private$init_predictor <- train_set$.__enclos_env__$private$predictor

# Check if predictor is existing
if (!is.null(private$init_predictor)) {

# Merge booster
.Call(
LGBM_BoosterMerge_R
, handle
, private$init_predictor$.__enclos_env__$private$handle
)

}

# Check current iteration
private$is_predicted_cur_iter <- c(private$is_predicted_cur_iter, FALSE)
if (!is.null(train_set)) {

} else if (!is.null(modelfile)) {
if (!lgb.is.Dataset(train_set)) {
stop("lgb.Booster: Can only use lgb.Dataset as training data")
}
train_set_handle <- train_set$.__enclos_env__$private$get_handle()
params <- utils::modifyList(params, train_set$get_params())
params_str <- lgb.params2str(params = params)
# Store booster handle
handle <- .Call(
LGBM_BoosterCreate_R
, train_set_handle
, params_str
)

# Do we have a model file as character?
if (!is.character(modelfile)) {
stop("lgb.Booster: Can only use a string as model file path")
}
# Create private booster information
private$train_set <- train_set
private$train_set_version <- train_set$.__enclos_env__$private$version
private$num_dataset <- 1L
private$init_predictor <- train_set$.__enclos_env__$private$predictor

modelfile <- path.expand(modelfile)
if (!is.null(private$init_predictor)) {

# Create booster from model
handle <- .Call(
LGBM_BoosterCreateFromModelfile_R
, modelfile
# Merge booster
.Call(
LGBM_BoosterMerge_R
, handle
, private$init_predictor$.__enclos_env__$private$handle
)

} else if (!is.null(model_str)) {
}

# Do we have a model_str as character/raw?
if (!is.raw(model_str) && !is.character(model_str)) {
stop("lgb.Booster: Can only use a character/raw vector as model_str")
}
# Check current iteration
private$is_predicted_cur_iter <- c(private$is_predicted_cur_iter, FALSE)

# Create booster from model
handle <- .Call(
LGBM_BoosterLoadModelFromString_R
, model_str
)
} else if (!is.null(modelfile)) {

} else {
# Do we have a model file as character?
if (!is.character(modelfile)) {
stop("lgb.Booster: Can only use a string as model file path")
}

# Booster non existent
stop(
"lgb.Booster: Need at least either training dataset, "
, "model file, or model_str to create booster instance"
)
modelfile <- path.expand(modelfile)

}
# Create booster from model
handle <- .Call(
LGBM_BoosterCreateFromModelfile_R
, modelfile
)

})
} else if (!is.null(model_str)) {

# Check whether the handle was created properly if it was not stopped earlier by a stop call
if (isTRUE(lgb.is.null.handle(x = handle))) {
# Do we have a model_str as character/raw?
if (!is.raw(model_str) && !is.character(model_str)) {
stop("lgb.Booster: Can only use a character/raw vector as model_str")
}

stop("lgb.Booster: cannot create Booster handle")
# Create booster from model
handle <- .Call(
LGBM_BoosterLoadModelFromString_R
, model_str
)

} else {

# Create class
class(handle) <- "lgb.Booster.handle"
private$handle <- handle
private$num_class <- 1L
.Call(
LGBM_BoosterGetNumClasses_R
, private$handle
, private$num_class
# Booster non existent
stop(
"lgb.Booster: Need at least either training dataset, "
, "model file, or model_str to create booster instance"
)

}

class(handle) <- "lgb.Booster.handle"
private$handle <- handle
private$num_class <- 1L
.Call(
LGBM_BoosterGetNumClasses_R
, private$handle
, private$num_class
)

self$params <- params

return(invisible(NULL))
Expand Down
1 change: 0 additions & 1 deletion R-package/R/lgb.cv.R
Original file line number Diff line number Diff line change
Expand Up @@ -137,7 +137,6 @@ lgb.cv <- function(params = list()
early_stopping_rounds <- params[["early_stopping_round"]]

# extract any function objects passed for objective or metric
params <- lgb.check.obj(params = params)
fobj <- NULL
if (is.function(params$objective)) {
fobj <- params$objective
Expand Down
1 change: 0 additions & 1 deletion R-package/R/lgb.train.R
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,6 @@ lgb.train <- function(params = list(),
early_stopping_rounds <- params[["early_stopping_round"]]

# extract any function objects passed for objective or metric
params <- lgb.check.obj(params = params)
fobj <- NULL
if (is.function(params$objective)) {
fobj <- params$objective
Expand Down
58 changes: 0 additions & 58 deletions R-package/R/utils.R
Original file line number Diff line number Diff line change
Expand Up @@ -117,64 +117,6 @@ lgb.check_interaction_constraints <- function(interaction_constraints, column_na

}

# Validate the "objective" entry of a parameter list.
#
# If params$objective is a character value it must be one of the
# objective names known to this check; otherwise an error is raised.
# Function-valued objectives (custom objectives) pass through untouched.
# Returns `params` unchanged so the call can be used in a pipeline.
lgb.check.obj <- function(params) {

  # Objective names accepted by LightGBM (including aliases)
  OBJECTIVES <- c(
    "regression"
    , "regression_l1"
    , "regression_l2"
    , "mean_squared_error"
    , "mse"
    , "l2_root"
    , "root_mean_squared_error"
    , "rmse"
    , "mean_absolute_error"
    , "mae"
    , "quantile"
    , "huber"
    , "fair"
    , "poisson"
    , "binary"
    , "lambdarank"
    , "multiclass"
    , "softmax"
    , "multiclassova"
    , "multiclass_ova"
    , "ova"
    , "ovr"
    , "xentropy"
    , "cross_entropy"
    , "xentlambda"
    , "cross_entropy_lambda"
    , "mean_absolute_percentage_error"
    , "mape"
    , "gamma"
    , "tweedie"
    , "rank_xendcg"
    , "xendcg"
    , "xe_ndcg"
    , "xe_ndcg_mart"
    , "xendcg_mart"
  )

  # An objective must always be supplied (character name or custom function)
  if (is.null(params$objective)) {
    stop("lgb.check.obj: objective should be a character or a function")
  }

  # Only character objectives are checked against the known names;
  # function objectives are assumed to be user-supplied custom objectives
  if (is.character(params$objective) && !(params$objective %in% OBJECTIVES)) {
    stop("lgb.check.obj: objective name error should be one of (", paste0(OBJECTIVES, collapse = ", "), ")")
  }

  return(params)

}

# [description]
# Take any character values from eval and store them in params$metric.
Expand Down
32 changes: 32 additions & 0 deletions R-package/tests/testthat/test_basic.R
Original file line number Diff line number Diff line change
Expand Up @@ -520,6 +520,22 @@ test_that("lgb.cv() respects showsd argument", {
expect_identical(evals_no_showsd[["eval_err"]], list())
})

test_that("lgb.cv() raises an informative error for unrecognized objectives", {
  # Build a minimal training Dataset from the shared test fixture
  dtrain <- lgb.Dataset(
    data = train$data
    , label = train$label
  )
  # "objective_type" is a recognized alias for "objective", so the bad
  # value should reach the library and trigger its own error message
  bad_params <- list(
    objective_type = "not_a_real_objective"
    , verbosity = VERBOSITY
  )
  expect_error(
    lgb.cv(data = dtrain, params = bad_params)
    , regexp = "Unknown objective type name: not_a_real_objective"
  )
})

test_that("lgb.cv() respects parameter aliases for objective", {
nrounds <- 3L
nfold <- 4L
Expand Down Expand Up @@ -663,6 +679,22 @@ test_that("lgb.train() works as expected with multiple eval metrics", {
)
})

test_that("lgb.train() raises an informative error for unrecognized objectives", {
  # Build a minimal training Dataset from the shared test fixture
  dtrain <- lgb.Dataset(
    data = train$data
    , label = train$label
  )
  # "objective_type" is a recognized alias for "objective", so the bad
  # value should reach the library and trigger its own error message
  bad_params <- list(
    objective_type = "not_a_real_objective"
    , verbosity = VERBOSITY
  )
  expect_error(
    lgb.train(data = dtrain, params = bad_params)
    , regexp = "Unknown objective type name: not_a_real_objective"
  )
})

test_that("lgb.train() respects parameter aliases for objective", {
nrounds <- 3L
dtrain <- lgb.Dataset(
Expand Down
Loading

0 comments on commit 2e5a749

Please sign in to comment.