Mirror of https://github.com/microsoft/LightGBM.git
[python-package] respect 'verbose' setting when using custom objective function (fixes #6014) (#6428)
Parent: 525f8b4b80
Commit: 2bc3ab86b7

@@ -40,9 +40,24 @@ void GetFirstValueAsInt(const std::unordered_map<std::string, std::vector<std::s
 }
 
 void Config::SetVerbosity(const std::unordered_map<std::string, std::vector<std::string>>& params) {
-  int verbosity = Config().verbosity;
-  GetFirstValueAsInt(params, "verbose", &verbosity);
-  GetFirstValueAsInt(params, "verbosity", &verbosity);
+  int verbosity = 1;
+
+  // if "verbosity" was found in params, prefer that to any other aliases
+  const auto verbosity_iter = params.find("verbosity");
+  if (verbosity_iter != params.end()) {
+    GetFirstValueAsInt(params, "verbosity", &verbosity);
+  } else {
+    // if "verbose" was found in params and "verbosity" was not, use that value
+    const auto verbose_iter = params.find("verbose");
+    if (verbose_iter != params.end()) {
+      GetFirstValueAsInt(params, "verbose", &verbosity);
+    } else {
+      // if "verbosity" and "verbose" were both missing from params, don't modify LightGBM's log level
+      return;
+    }
+  }
+
+  // otherwise, update LightGBM's log level based on the passed-in value
   if (verbosity < 0) {
     LightGBM::Log::ResetLogLevel(LightGBM::LogLevel::Fatal);
   } else if (verbosity == 0) {
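
Summarizing the new control flow: "verbosity" wins over the "verbose" alias, and when neither key is present the log level is left alone (the removed lines instead reset it to the default whenever parameters were re-parsed). A minimal Python sketch of the same precedence rule; resolve_verbosity is an illustrative name, not LightGBM API:

    def resolve_verbosity(params):
        # "verbosity" takes precedence over any alias when both are present
        if "verbosity" in params:
            return int(params["verbosity"])
        # otherwise fall back to the "verbose" alias
        if "verbose" in params:
            return int(params["verbose"])
        # neither key present: None means "leave the current log level unchanged"
        return None
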
@@ -383,7 +383,7 @@ def test_add_features_does_not_fail_if_initial_dataset_has_zero_informative_feat
     arr_a = np.zeros((100, 1), dtype=np.float32)
     arr_b = rng.uniform(size=(100, 5))
 
-    dataset_a = lgb.Dataset(arr_a).construct()
+    dataset_a = lgb.Dataset(arr_a, params={"verbose": 0}).construct()
     expected_msg = (
         "[LightGBM] [Warning] There are no meaningful features which satisfy "
         "the provided configuration. Decreasing Dataset parameters min_data_in_bin "
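
The test change above exercises the same code path from the Dataset side; a small usage sketch (the all-zeros matrix is only there to trigger the warning):

    import numpy as np
    import lightgbm as lgb

    X = np.zeros((100, 1), dtype=np.float32)  # deliberately uninformative
    # "verbose" in Dataset params now reaches the C++ config, so
    # construction-time warnings can be silenced per-Dataset:
    ds = lgb.Dataset(X, params={"verbose": -1}).construct()
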
@@ -1469,6 +1469,7 @@ def test_parameters_are_loaded_from_model_file(tmp_path, capsys, rng):
         "metric": ["l2", "rmse"],
         "num_leaves": 5,
         "num_threads": 1,
+        "verbosity": 0,
     }
     model_file = tmp_path / "model.txt"
     orig_bst = lgb.train(params, ds, num_boost_round=1, categorical_feature=[1, 2])
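
The enclosing test checks that training parameters round-trip through the model file; a condensed sketch of that pattern, assuming the Booster.params attribute exposed by the python-package:

    import numpy as np
    import lightgbm as lgb

    rng = np.random.default_rng(0)
    X, y = rng.uniform(size=(100, 3)), rng.uniform(size=(100,))
    params = {"objective": "regression", "num_leaves": 5, "verbosity": 0}

    bst = lgb.train(params, lgb.Dataset(X, y), num_boost_round=1)
    bst.save_model("model.txt")

    # parameters written into model.txt are restored on load
    loaded = lgb.Booster(model_file="model.txt")
    assert loaded.params["num_leaves"] == 5
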
@@ -4274,11 +4275,25 @@ def test_verbosity_and_verbose(capsys):
         "verbosity": 0,
     }
     lgb.train(params, ds, num_boost_round=1)
-    expected_msg = "[LightGBM] [Warning] verbosity is set=0, verbose=1 will be ignored. " "Current value: verbosity=0"
+    expected_msg = "[LightGBM] [Warning] verbosity is set=0, verbose=1 will be ignored. Current value: verbosity=0"
     stdout = capsys.readouterr().out
     assert expected_msg in stdout
 
 
+def test_verbosity_is_respected_when_using_custom_objective(capsys):
+    X, y = make_synthetic_regression()
+    ds = lgb.Dataset(X, y)
+    params = {
+        "objective": mse_obj,
+        "nonsense": 123,
+        "num_leaves": 3,
+    }
+    lgb.train({**params, "verbosity": -1}, ds, num_boost_round=1)
+    assert capsys.readouterr().out == ""
+    lgb.train({**params, "verbosity": 0}, ds, num_boost_round=1)
+    assert "[LightGBM] [Warning] Unknown parameter: nonsense" in capsys.readouterr().out
+
+
 @pytest.mark.parametrize("verbosity_param", lgb.basic._ConfigAliases.get("verbosity"))
 @pytest.mark.parametrize("verbosity", [-1, 0])
 def test_verbosity_can_suppress_alias_warnings(capsys, verbosity_param, verbosity):
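
The new test references mse_obj, a suite helper not shown in this diff. For lgb.train, a custom objective takes (preds, train_data) and returns per-row gradient and hessian; a representative sketch of such a helper (not necessarily the suite's exact definition):

    import numpy as np

    def mse_obj(preds, train_data):
        # mean-squared-error objective in lgb.train's custom-objective form
        y = train_data.get_label()
        grad = preds - y        # gradient of 0.5 * (pred - y) ** 2
        hess = np.ones_like(y)  # its second derivative is constant
        return grad, hess
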
@@ -1290,6 +1290,19 @@ def test_max_depth_warning_is_never_raised(capsys, estimator_class, max_depth):
     assert "Provided parameters constrain tree depth" not in capsys.readouterr().out
 
 
+def test_verbosity_is_respected_when_using_custom_objective(capsys):
+    X, y = make_synthetic_regression()
+    params = {
+        "objective": objective_ls,
+        "nonsense": 123,
+        "num_leaves": 3,
+    }
+    lgb.LGBMRegressor(**params, verbosity=-1, n_estimators=1).fit(X, y)
+    assert capsys.readouterr().out == ""
+    lgb.LGBMRegressor(**params, verbosity=0, n_estimators=1).fit(X, y)
+    assert "[LightGBM] [Warning] Unknown parameter: nonsense" in capsys.readouterr().out
+
+
 @pytest.mark.parametrize("estimator_class", [lgb.LGBMModel, lgb.LGBMClassifier, lgb.LGBMRegressor, lgb.LGBMRanker])
 def test_getting_feature_names_in_np_input(estimator_class):
     # input is a numpy array, which doesn't have feature names. LightGBM adds
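
objective_ls is likewise a suite helper; in the sklearn API a custom objective uses the (y_true, y_pred) -> (grad, hess) signature instead. A representative sketch:

    import numpy as np

    def objective_ls(y_true, y_pred):
        # least-squares gradient and (constant) hessian, sklearn-API signature
        grad = y_pred - y_true
        hess = np.ones_like(y_true)
        return grad, hess
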
@@ -31,7 +31,7 @@ def test_register_logger(tmp_path):
     eval_records = {}
     callbacks = [lgb.record_evaluation(eval_records), lgb.log_evaluation(2), lgb.early_stopping(10)]
     lgb.train(
-        {"objective": "binary", "metric": ["auc", "binary_error"]},
+        {"objective": "binary", "metric": ["auc", "binary_error"], "verbose": 1},
         lgb_train,
         num_boost_round=10,
         feval=dummy_metric,
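
test_register_logger covers lgb.register_logger, which routes LightGBM's log output through a standard logging.Logger; a minimal sketch (logger name and handler choice are arbitrary):

    import logging
    import lightgbm as lgb

    logger = logging.getLogger("lightgbm-example")
    logger.setLevel(logging.INFO)
    logger.addHandler(logging.StreamHandler())

    # subsequent LightGBM messages (including the verbose/verbosity
    # warnings tested above) go through this logger
    lgb.register_logger(logger)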