add binary logloss metric for new CUDA version

This commit is contained in:
Yu Shi 2022-12-13 05:58:43 +00:00
Parent 6fa4673f69
Commit 3b2078f769
2 changed files with 57 additions and 2 deletions

View file

@@ -0,0 +1,55 @@
/*!
* Copyright (c) 2022 Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See LICENSE file in the project root for
* license information.
*/
#ifndef LIGHTGBM_METRIC_CUDA_CUDA_BINARY_METRIC_HPP_
#define LIGHTGBM_METRIC_CUDA_CUDA_BINARY_METRIC_HPP_
#ifdef USE_CUDA_EXP
#include <LightGBM/cuda/cuda_metric.hpp>
#include <LightGBM/cuda/cuda_utils.h>
#include <vector>
#include "cuda_regression_metric.hpp"
#include "../binary_metric.hpp"
namespace LightGBM {
template <typename HOST_METRIC, typename CUDA_METRIC>
class CUDABinaryMetricInterface: public CUDARegressionMetricInterface<HOST_METRIC, CUDA_METRIC> {
 public:
  explicit CUDABinaryMetricInterface(const Config& config): CUDARegressionMetricInterface<HOST_METRIC, CUDA_METRIC>(config) {}
  virtual ~CUDABinaryMetricInterface() {}
};
class CUDABinaryLoglossMetric: public CUDABinaryMetricInterface<BinaryLoglossMetric, CUDABinaryLoglossMetric> {
 public:
  explicit CUDABinaryLoglossMetric(const Config& config);
  virtual ~CUDABinaryLoglossMetric() {}
  __device__ static double MetricOnPointCUDA(label_t label, double score) {
    // score should have been converted to probability
    if (label <= 0) {
      if (1.0f - score > kEpsilon) {
        return -log(1.0f - score);
      }
    } else {
      if (score > kEpsilon) {
        return -log(score);
      }
    }
    return -log(kEpsilon);
  }
};
} // namespace LightGBM
#endif // USE_CUDA_EXP
#endif // LIGHTGBM_METRIC_CUDA_CUDA_BINARY_METRIC_HPP_
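Note that MetricOnPointCUDA only scores a single data point; the reduction over the whole dataset is inherited from CUDARegressionMetricInterface and is not part of this diff. Below is a minimal, self-contained sketch (not LightGBM's actual kernel) of how such a per-point loss could be evaluated on the GPU after converting raw scores to probabilities with a sigmoid. The kernel name, launch configuration, and the 1e-15 clamping constant are illustrative assumptions.

// Illustrative sketch only, not part of this commit: evaluate the same
// per-point binary log loss on the GPU and average it on the host.
#include <cuda_runtime.h>
#include <cmath>
#include <cstdio>
#include <vector>

__device__ double PointLogloss(float label, double prob) {
  const double kEps = 1e-15;  // assumed value of the clamping constant
  if (label <= 0.0f) {
    if (1.0 - prob > kEps) return -log(1.0 - prob);
  } else {
    if (prob > kEps) return -log(prob);
  }
  return -log(kEps);
}

__global__ void LoglossKernel(const float* labels, const double* raw_scores,
                              int n, double* point_loss) {
  const int i = blockIdx.x * blockDim.x + threadIdx.x;
  if (i < n) {
    // "score should have been converted to probability": apply the sigmoid first.
    const double prob = 1.0 / (1.0 + exp(-raw_scores[i]));
    point_loss[i] = PointLogloss(labels[i], prob);
  }
}

int main() {
  const int n = 4;
  const std::vector<float> labels = {1.0f, 0.0f, 1.0f, 0.0f};
  const std::vector<double> raw_scores = {2.0, -1.5, 0.3, 0.8};
  float* d_labels = nullptr;
  double* d_raw = nullptr;
  double* d_loss = nullptr;
  cudaMalloc(&d_labels, n * sizeof(float));
  cudaMalloc(&d_raw, n * sizeof(double));
  cudaMalloc(&d_loss, n * sizeof(double));
  cudaMemcpy(d_labels, labels.data(), n * sizeof(float), cudaMemcpyHostToDevice);
  cudaMemcpy(d_raw, raw_scores.data(), n * sizeof(double), cudaMemcpyHostToDevice);
  LoglossKernel<<<1, 32>>>(d_labels, d_raw, n, d_loss);
  std::vector<double> point_loss(n);
  cudaMemcpy(point_loss.data(), d_loss, n * sizeof(double), cudaMemcpyDeviceToHost);
  double sum = 0.0;
  for (double v : point_loss) sum += v;
  printf("mean binary_logloss = %f\n", sum / n);
  cudaFree(d_labels);
  cudaFree(d_raw);
  cudaFree(d_loss);
  return 0;
}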

View file

@@ -11,6 +11,7 @@
 #include "regression_metric.hpp"
 #include "xentropy_metric.hpp"
+#include "cuda/cuda_binary_metric.hpp"
 #include "cuda/cuda_regression_metric.hpp"
 namespace LightGBM {
@@ -39,8 +40,7 @@ Metric* Metric::CreateMetric(const std::string& type, const Config& config) {
       Log::Warning("Metric poisson is not implemented in cuda_exp version. Fall back to evaluation on CPU.");
       return new PoissonMetric(config);
     } else if (type == std::string("binary_logloss")) {
-      Log::Warning("Metric binary_logloss is not implemented in cuda_exp version. Fall back to evaluation on CPU.");
-      return new BinaryLoglossMetric(config);
+      return new CUDABinaryLoglossMetric(config);
     } else if (type == std::string("binary_error")) {
       Log::Warning("Metric binary_error is not implemented in cuda_exp version. Fall back to evaluation on CPU.");
       return new BinaryErrorMetric(config);
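With this change the factory returns CUDABinaryLoglossMetric directly instead of warning and falling back to the CPU BinaryLoglossMetric, so the CUDA path should reproduce what the CPU metric computed before. A small host-only sanity check of the per-point formula against the textbook binary cross-entropy, assuming a 1e-15 value for kEpsilon, could look like this:

// Host-only check (illustrative, not part of the commit): the clamped, branchy
// per-point loss above should agree with -[y*log(p) + (1-y)*log(1-p)] whenever
// the probability is not clamped.
#include <cmath>
#include <cstdio>

static double ClampedLoss(float label, double prob) {
  const double kEps = 1e-15;  // assumed value of kEpsilon
  if (label <= 0.0f) {
    if (1.0 - prob > kEps) return -std::log(1.0 - prob);
  } else {
    if (prob > kEps) return -std::log(prob);
  }
  return -std::log(kEps);
}

static double TextbookLoss(float label, double prob) {
  return -(label * std::log(prob) + (1.0 - label) * std::log(1.0 - prob));
}

int main() {
  const float labels[] = {0.0f, 1.0f, 0.0f, 1.0f};
  const double probs[] = {0.1, 0.9, 0.7, 0.2};
  for (int i = 0; i < 4; ++i) {
    std::printf("y=%.0f p=%.1f clamped=%.6f textbook=%.6f\n",
                labels[i], probs[i],
                ClampedLoss(labels[i], probs[i]),
                TextbookLoss(labels[i], probs[i]));
  }
  return 0;
}

The clamping exists so that a probability of exactly 0 or 1 on the wrong label yields -log(kEpsilon) rather than an infinite loss.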