Fix misc. errors flagged by flake8

Signed-off-by: Keith Battocchi <kebatt@microsoft.com>
Keith Battocchi 2024-02-07 13:20:29 -05:00 committed by Keith Battocchi
Parent 079d65998b
Commit 639d28cb09
6 changed files with 11 additions and 13 deletions

View file

@@ -853,7 +853,7 @@ class _OrthoLearner(TreatmentExpansionMixin, LinearCateEstimator):
for nuisance_mc_variants in zip(*all_nuisances))
else:
raise ValueError(
"Parameter `mc_agg` must be one of {'mean', 'median'}. Got {}".format(self.mc_agg))
f"Parameter `mc_agg` must be one of {{'mean', 'median'}}. Got {self.mc_agg}")
Y, T, X, W, Z, sample_weight, freq_weight, sample_var = (self._subinds_check_none(arr, fitted_inds)
for arr in (Y, T, X, W, Z, sample_weight,
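A note for context (not part of the diff): in the new f-string the braces around the allowed values are doubled so they render as literal characters, while the old .format call left them unescaped, so str.format would have parsed {'mean', 'median'} as a replacement field and raised a KeyError whenever this error path was hit. A minimal sketch of that behavior, using a hypothetical mc_agg value:

mc_agg = "max"  # hypothetical value, for illustration only

# f-string: doubled braces render as literal { and }
msg = f"Parameter `mc_agg` must be one of {{'mean', 'median'}}. Got {mc_agg}"
assert msg == "Parameter `mc_agg` must be one of {'mean', 'median'}. Got max"

# old .format call: the unescaped braces are parsed as a replacement field
try:
    "Parameter `mc_agg` must be one of {'mean', 'median'}. Got {}".format(mc_agg)
except KeyError:
    pass  # "'mean', 'median'" is looked up as a (nonexistent) keyword field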

View file

@@ -4,6 +4,7 @@
import numpy as np
import unittest
import pytest
+ import matplotlib
from econml.cate_interpreter import SingleTreeCateInterpreter, SingleTreePolicyInterpreter
from econml.dml import LinearDML
from sklearn.linear_model import LinearRegression, LogisticRegression
@@ -16,7 +17,6 @@ try:
except Exception:
graphviz_works = False
- import matplotlib
matplotlib.use('Agg')

View file

@@ -3,7 +3,6 @@
from sklearn.datasets import make_regression
from econml._ortho_learner import _OrthoLearner, _crossfit
- from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression, LassoCV, Lasso
from sklearn.model_selection import KFold

View file

@@ -4,10 +4,10 @@
import numpy as np
import unittest
import shap
- from econml.dml import *
- from econml.orf import *
- from econml.dr import *
- from econml.metalearners import *
+ from econml.dml import LinearDML, CausalForestDML, NonParamDML
+ from econml.orf import DMLOrthoForest, DROrthoForest
+ from econml.dr import DRLearner, ForestDRLearner
+ from econml.metalearners import TLearner, SLearner, XLearner, DomainAdaptationLearner
from sklearn.linear_model import LinearRegression, LogisticRegression, Lasso
from sklearn.ensemble import RandomForestRegressor, RandomForestClassifier
from sklearn.preprocessing import PolynomialFeatures

View file

@@ -416,7 +416,7 @@ class TestStatsModels(unittest.TestCase):
"{}, {}".format(est.coef__interval()[1][t],
np.array([scipy.stats.norm.ppf(.975, loc=1, scale=1)] +
[scipy.stats.norm.ppf(.975, loc=0, scale=1)] * (d - 1)))
- assert np.all(np.abs(est.intercept_[t]) <= 1e-12), "{}, {}".format(est.intercept_[t])
+ assert np.all(np.abs(est.intercept_[t]) <= 1e-12), "{}".format(est.intercept_[t])
assert np.all(np.abs(est.intercept_stderr_[t]) <= 1e-12), "{}".format(est.intercept_stderr_[t])
assert np.all(np.abs(est.intercept__interval()[0][t]) <=
1e-12), "{}".format(est.intercept__interval()[0][t])
@@ -446,7 +446,7 @@
"{}, {}".format(est.coef__interval()[1][t],
np.array([scipy.stats.norm.ppf(.975, loc=1, scale=np.sqrt(2))] +
[scipy.stats.norm.ppf(.975, loc=0, scale=np.sqrt(2))] * (d - 1)))
- assert np.all(np.abs(est.intercept_[t]) <= 1e-12), "{}, {}".format(est.intercept_[t])
+ assert np.all(np.abs(est.intercept_[t]) <= 1e-12), "{}".format(est.intercept_[t])
assert np.all(np.abs(est.intercept_stderr_[t] - 1) <= 1e-12), "{}".format(est.intercept_stderr_[t])
assert np.all(np.abs(est.intercept__interval()[0][t] -
scipy.stats.norm.ppf(.025, loc=0, scale=1)) <= 1e-12), \
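A note for context (not part of the diff): the removed assertion messages used two placeholders but passed only one argument, so str.format would itself raise an IndexError if the assertion ever failed, masking the real failure. A minimal sketch with a hypothetical intercept value:

intercept = 0.5  # hypothetical value, for illustration only

# two placeholders, one argument: formatting the message itself fails
try:
    "{}, {}".format(intercept)
except IndexError:
    pass  # replacement index 1 is out of range for the argument tuple

# one placeholder, one argument: the intended message is produced
assert "{}".format(intercept) == "0.5"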

View file

@@ -14,8 +14,6 @@ import argparse
import warnings
import joblib
from sklearn.model_selection import GridSearchCV
- from statsmodels.tools.tools import add_constant
- from econml.utilities import cross_product
from sklearn.multioutput import MultiOutputRegressor
@@ -251,7 +249,7 @@ def run_all_mc(first_stage, folder, n_list, n_exp, hetero_coef_list, d_list,
(hetero_coef * X[:, [0]] + 1) * np.random.normal(0, 1, size=(n, p))
XT = np.hstack([X, T])
- X1, X2, y1, y2, X_final_first, X_final_sec, y_sum_first, y_sum_sec,\
+ X1, X2, y1, y2, X_final_first, X_final_sec, y_sum_first, y_sum_sec, \
n_sum_first, n_sum_sec, var_first, var_sec = _summarize(XT, y)
X = np.vstack([X1, X2])
y = np.concatenate((y1, y2))
@@ -420,7 +418,8 @@ def monte_carlo_gcv(folder='gcv'):
min_samples_leaf=10, random_state=123),
MultiOutputRegressor(GradientBoostingRegressor(n_estimators=20,
max_depth=3,
- min_samples_leaf=10, random_state=123))],
+ min_samples_leaf=10,
+ random_state=123))],
param_grid_list=[{},
{},
{},