Guolin Ke 2016-11-24 18:21:22 +08:00
Parent 629fc047e2
Commit b8d9372efd
3 changed files with 33 additions and 7 deletions

View file: .travis.yml

@@ -21,9 +21,11 @@ script:
 - cd $TRAVIS_BUILD_DIR
 - mkdir build && cd build && cmake .. && make -j
 - cd $TRAVIS_BUILD_DIR/tests/c_api_test && python test.py
+- cd $TRAVIS_BUILD_DIR/tests/python_package_test && python test_basic.py
 - cd $TRAVIS_BUILD_DIR
 - rm -rf build && mkdir build && cd build && cmake -DUSE_MPI=ON ..&& make -j
 - cd $TRAVIS_BUILD_DIR/tests/c_api_test && python test.py
+- cd $TRAVIS_BUILD_DIR/tests/python_package_test && python test_basic.py
 notifications:
   email: false

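The two added CI steps run the new Python-package smoke test alongside the existing C API test. A rough local equivalent is sketched below; it assumes the project has already been built and that the script is started from the repository root (the CI uses $TRAVIS_BUILD_DIR for the same purpose), with the paths taken from the .travis.yml entries above.

# Rough local equivalent of the two test steps in the CI script above.
# Assumes a completed build and that this runs from the repository root.
import subprocess

for test_dir, script in [("tests/c_api_test", "test.py"),
                         ("tests/python_package_test", "test_basic.py")]:
    subprocess.check_call(["python", script], cwd=test_dir)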
View file: python-package/lightgbm/basic.py

@@ -290,7 +290,7 @@ class Predictor(object):
         n_preds *= used_iteration
         preds = np.zeros(n_preds, dtype=np.float32)
         out_num_preds = ctypes.c_int64(0)
-        _safe_call(LIB.LGBM_BoosterPredictForMat(
+        _safe_call(_LIB.LGBM_BoosterPredictForMat(
             self.handle,
             ptr_data,
             type_ptr_data,
@@ -324,7 +324,7 @@ class Predictor(object):
         ptr_indptr, type_ptr_indptr = c_int_array(csr.indptr)
         ptr_data, type_ptr_data = c_float_array(csr.data)
-        _safe_call(LIB.LGBM_BoosterPredictForCSR(
+        _safe_call(_LIB.LGBM_BoosterPredictForCSR(
             self.handle,
             ptr_indptr,
             type_ptr_indptr,
@@ -447,7 +447,7 @@ class Dataset(object):
             init_score = new_init_score
         self.set_init_score(init_score)
-    def new_valid_dataset(self, data, label=None, weight=None, group_id=None,
+    def create_valid(self, data, label=None, weight=None, group_id=None,
                      silent=False, params=None):
         """
         Create validation data align with current dataset
@@ -487,7 +487,7 @@ class Dataset(object):
         data = np.array(mat.reshape(mat.size), dtype=np.float32)
         ptr_data, type_ptr_data = c_float_array(data)
-        _safe_call(LIB.LGBM_CreateDatasetFromMat(
+        _safe_call(_LIB.LGBM_CreateDatasetFromMat(
             ptr_data,
             type_ptr_data,
             mat.shape[0],
@@ -825,7 +825,7 @@ class Booster(object):
         if self.handle is not None and self.__is_manage_handle:
             _safe_call(_LIB.LGBM_BoosterFree(self.handle))
-    def add_valid_data(self, data, name):
+    def add_valid(self, data, name):
         if data.predictor is not self.init_predictor:
             raise Exception("Add validation data failed, you should use same predictor for these data")
         _safe_call(_LIB.LGBM_BoosterAddValidData(
@@ -835,7 +835,7 @@ class Booster(object):
         self.name_valid_sets.append(name)
         self.__num_dataset += 1
-    def ResetParameter(self, params, silent=False):
+    def reset_parameter(self, params, silent=False):
         self.__need_reload_eval_info = True
         if silent:
             params["verbose"] = 0

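Most of the hunks above fix references to the module-level ctypes handle (LIB to _LIB) and rename methods to snake_case. For context, a minimal sketch of how such a handle and its _safe_call error wrapper are typically set up follows; the hard-coded library name and the use of LGBM_GetLastError are assumptions here, not the exact code from basic.py.

# Minimal sketch (not the package's exact code): load the LightGBM shared
# library with ctypes and turn non-zero C API return codes into exceptions.
import ctypes

_LIB = ctypes.cdll.LoadLibrary("lib_lightgbm.so")  # assumed library name/path
_LIB.LGBM_GetLastError.restype = ctypes.c_char_p

def _safe_call(ret):
    """Raise if a LightGBM C API call reported an error (non-zero return)."""
    if ret != 0:
        raise Exception(_LIB.LGBM_GetLastError())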
View file: tests/python_package_test/test_basic.py

@@ -0,0 +1,24 @@
+import numpy as np
+from sklearn import datasets, metrics, model_selection
+import importlib.util
+spec = importlib.util.spec_from_file_location("module.name", "../../python-package/lightgbm/basic.py")
+lgb = importlib.util.module_from_spec(spec)
+spec.loader.exec_module(lgb)
+X, Y = datasets.make_classification(n_samples=100000, n_features=100)
+x_train, x_test, y_train, y_test = model_selection.train_test_split(X, Y, test_size=0.1)
+train_data = lgb.Dataset(x_train, max_bin=255, label=y_train)
+valid_data = train_data.create_valid(x_test, label=y_test)
+config={"objective":"binary","metric":"auc", "min_data":1, "num_leaves":15}
+bst = lgb.Booster(params=config, train_set=train_data)
+bst.add_valid(valid_data,"valid_1")
+for i in range(100):
+    bst.update()
+print(bst.eval_train())
+print(bst.eval_valid())
+bst.save_model("model.txt")
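The new test exercises create_valid and add_valid but not the third rename, reset_parameter. A hedged sketch of how it could be used as a continuation of the test script above follows; the parameter key and values are illustrative only, not part of this commit.

# Illustrative only: reset_parameter(params, silent=False) takes a dict of
# parameter overrides (see the basic.py hunk above). The key and the extra
# training rounds here are assumptions, continuing from the test's booster.
bst.reset_parameter({"learning_rate": 0.05})
for i in range(10):
    bst.update()
print(bst.eval_valid())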