Adding ProductRegressor to gcm auto assignment
This model simply takes the product of the inputs. Signed-off-by: Patrick Bloebaum <bloebp@amazon.com>
This commit is contained in:
Родитель
af30e333ae
Коммит
f79cd0d0ba
|
@ -34,7 +34,12 @@ from dowhy.gcm.ml.classification import (
|
|||
create_random_forest_classifier,
|
||||
create_support_vector_classifier,
|
||||
)
|
||||
from dowhy.gcm.ml.regression import create_ada_boost_regressor, create_extra_trees_regressor, create_knn_regressor
|
||||
from dowhy.gcm.ml.regression import (
|
||||
create_ada_boost_regressor,
|
||||
create_extra_trees_regressor,
|
||||
create_knn_regressor,
|
||||
create_product_regressor,
|
||||
)
|
||||
from dowhy.gcm.stochastic_models import EmpiricalDistribution
|
||||
from dowhy.gcm.util.general import (
|
||||
apply_one_hot_encoding,
|
||||
|
@ -65,6 +70,7 @@ _LIST_OF_POTENTIAL_REGRESSORS = [
|
|||
create_extra_trees_regressor,
|
||||
create_knn_regressor,
|
||||
create_ada_boost_regressor,
|
||||
create_product_regressor,
|
||||
]
|
||||
|
||||
|
||||
|
@ -108,7 +114,6 @@ def assign_causal_mechanisms(
|
|||
|
||||
:return: None
|
||||
"""
|
||||
|
||||
for node in causal_model.graph.nodes:
|
||||
if not override_models and CAUSAL_MECHANISM in causal_model.graph.nodes[node]:
|
||||
validate_causal_model_assignment(causal_model.graph, node)
|
||||
|
@ -133,7 +138,6 @@ def select_model(
|
|||
X: np.ndarray, Y: np.ndarray, model_selection_quality: AssignmentQuality
|
||||
) -> Union[PredictionModel, ClassificationModel]:
|
||||
target_is_categorical = is_categorical(Y)
|
||||
|
||||
if model_selection_quality == AssignmentQuality.GOOD:
|
||||
use_linear_prediction_models = has_linear_relationship(X, Y)
|
||||
|
||||
|
@ -144,9 +148,13 @@ def select_model(
|
|||
return create_hist_gradient_boost_classifier()
|
||||
else:
|
||||
if use_linear_prediction_models:
|
||||
return create_linear_regressor()
|
||||
return find_best_model(
|
||||
[create_linear_regressor, create_product_regressor], X, Y, model_selection_splits=2
|
||||
)()
|
||||
else:
|
||||
return create_hist_gradient_boost_regressor()
|
||||
return find_best_model(
|
||||
[create_hist_gradient_boost_regressor, create_product_regressor], X, Y, model_selection_splits=2
|
||||
)()
|
||||
elif model_selection_quality == AssignmentQuality.BETTER:
|
||||
if target_is_categorical:
|
||||
return find_best_model(_LIST_OF_POTENTIAL_CLASSIFIERS, X, Y)()
|
||||
|
|
|
@ -115,6 +115,10 @@ def create_ada_boost_regressor(**kwargs) -> SklearnRegressionModel:
|
|||
return SklearnRegressionModel(AdaBoostRegressor(**kwargs))
|
||||
|
||||
|
||||
def create_product_regressor() -> PredictionModel:
    """Return a :class:`ProductRegressor`, a parameter-free model that predicts
    the product of its input features."""
    return ProductRegressor()
|
||||
|
||||
|
||||
class InvertibleIdentityFunction(InvertibleFunction):
|
||||
def evaluate(self, X: np.ndarray) -> np.ndarray:
    """Identity mapping: return the input unchanged (no copy is made)."""
    return X
|
||||
|
@ -137,3 +141,15 @@ class InvertibleLogarithmicFunction(InvertibleFunction):
|
|||
|
||||
def evaluate_inverse(self, X: np.ndarray) -> np.ndarray:
    """Invert the logarithmic forward mapping by exponentiating element-wise."""
    result = np.exp(X)
    return result
|
||||
|
||||
|
||||
class ProductRegressor(PredictionModel):
    """Parameter-free model whose prediction for a sample is the product of
    all of that sample's feature values.

    There is nothing to learn, so :meth:`fit` is a no-op and :meth:`clone`
    simply returns a fresh instance.
    """

    def fit(self, X, Y):
        """No-op: this model has no trainable parameters."""
        pass

    def predict(self, X):
        """Return the row-wise product of ``X`` as a column vector of shape (n, 1)."""
        row_products = np.multiply.reduce(X, axis=1)
        return row_products[:, np.newaxis]

    def clone(self):
        """Return an independent new instance; trivially cheap since the model is stateless."""
        return ProductRegressor()
|
||||
|
|
|
@ -0,0 +1,12 @@
|
|||
import numpy as np
|
||||
from _pytest.python_api import approx
|
||||
|
||||
from dowhy.gcm.ml.regression import create_product_regressor
|
||||
|
||||
|
||||
def test_given_product_regressor_then_computes_correct_values():
    # Three samples with three features each; the expected prediction for each
    # sample is the product of its feature values.
    features = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])

    regressor = create_product_regressor()

    expected = np.array([6, 120, 504])
    assert regressor.predict(features).reshape(-1) == approx(expected)
|
Загрузка…
Ссылка в новой задаче