Mirror of https://github.com/py-why/EconML.git
Fix E731
Signed-off-by: Keith Battocchi <kebatt@microsoft.com>
Parent: d99a086e14
Commit: cce3ded03c
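E731 is the lint rule "do not assign a lambda expression, use a def". This commit rewrites lambda assignments in the example notebooks as equivalent named functions; callers are unchanged because the functions are invoked the same way. A minimal sketch of the before/after pattern, using one of the samplers that appears in the diffs below:

import numpy as np

# Before: E731 flags assigning a lambda to a name
epsilon_sample = lambda n: np.random.uniform(-1, 1, size=n)

# After: an equivalent def, which lints cleanly and carries a real __name__
def epsilon_sample(n):
    return np.random.uniform(-1, 1, size=n)
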
@@ -310,11 +310,13 @@
 "# Outcome support\n",
 "support_Y = np.random.choice(np.arange(n_w), size=support_size, replace=False)\n",
 "coefs_Y = np.random.uniform(0, 1, size=support_size)\n",
-"epsilon_sample = lambda n: np.random.uniform(-1, 1, size=n)\n",
+"def epsilon_sample(n):\n",
+"    return np.random.uniform(-1, 1, size=n)\n",
 "# Treatment support\n",
 "support_T = support_Y\n",
 "coefs_T = np.random.uniform(0, 1, size=support_size)\n",
-"eta_sample = lambda n: np.random.uniform(-1, 1, size=n)\n",
+"def eta_sample(n):\n",
+"    return np.random.uniform(-1, 1, size=n)\n",
 "\n",
 "# Generate controls, covariates, treatments and outcomes\n",
 "W = np.random.normal(0, 1, size=(n, n_w))\n",

@@ -125,11 +125,13 @@
 "# Outcome support\n",
 "support_Y = np.random.choice(range(n_w), size=support_size, replace=False)\n",
 "coefs_Y = np.random.uniform(0, 1, size=support_size)\n",
-"epsilon_sample = lambda n: np.random.uniform(-1, 1, size=n)\n",
+"def epsilon_sample(n):\n",
+"    return np.random.uniform(-1, 1, size=n)\n",
 "# Treatment support \n",
 "support_T = support_Y\n",
 "coefs_T = np.random.uniform(0, 1, size=support_size)\n",
-"eta_sample = lambda n: np.random.uniform(-1, 1, size=n) \n",
+"def eta_sample(n):\n",
+"    return np.random.uniform(-1, 1, size=n) \n",
 "\n",
 "# Generate controls, covariates, treatments and outcomes\n",
 "W = np.random.normal(0, 1, size=(n, n_w))\n",

@@ -552,11 +554,13 @@
 "# Outcome support\n",
 "support_Y = np.random.choice(range(n_w), size=support_size, replace=False)\n",
 "coefs_Y = np.random.uniform(0, 1, size=support_size)\n",
-"epsilon_sample = lambda n: np.random.uniform(-1, 1, size=n)\n",
+"def epsilon_sample(n):\n",
+"    return np.random.uniform(-1, 1, size=n)\n",
 "# Treatment support\n",
 "support_T = support_Y\n",
 "coefs_T = np.random.uniform(0, 1, size=support_size)\n",
-"eta_sample = lambda n: np.random.uniform(-1, 1, size=n) \n",
+"def eta_sample(n):\n",
+"    return np.random.uniform(-1, 1, size=n) \n",
 "\n",
 "# Generate controls, covariates, treatments and outcomes\n",
 "W = np.random.normal(0, 1, size=(n, n_w))\n",

@@ -895,11 +899,13 @@
 "    # Outcome support\n",
 "    support_Y = np.random.choice(range(n_w), size=support_size, replace=False)\n",
 "    coefs_Y = np.random.uniform(0, 1, size=support_size)\n",
-"    epsilon_sample = lambda n: np.random.uniform(-1, 1, size=n)\n",
+"    def epsilon_sample(n):\n",
+"        return np.random.uniform(-1, 1, size=n)\n",
 "    # Treatment support \n",
 "    support_T = support_Y\n",
 "    coefs_T = np.random.uniform(0, 1, size=(support_size, n_treatments))\n",
-"    eta_sample = lambda n: np.random.uniform(-1, 1, size=n) \n",
+"    def eta_sample(n):\n",
+"        return np.random.uniform(-1, 1, size=n) \n",
 "    # Generate controls, covariates, treatments and outcomes\n",
 "    W = np.random.normal(0, 1, size=(n, n_w))\n",
 "    X = np.random.uniform(0, 1, size=(n, n_x))\n",

@@ -119,11 +119,13 @@
 "# Outcome support\n",
 "support_Y = np.random.choice(range(n_x), size=support_size, replace=False)\n",
 "coefs_Y = np.random.uniform(0, 1, size=support_size)\n",
-"epsilon_sample = lambda n:np.random.uniform(-1, 1, size=n)\n",
+"def epsilon_sample(n):\n",
+"    return np.random.uniform(-1, 1, size=n)\n",
 "# Treatment support\n",
 "support_T = support_Y\n",
 "coefs_T = np.random.uniform(0, 1, size=support_size)\n",
-"eta_sample = lambda n: np.random.uniform(-1, 1, size=n) \n",
+"def eta_sample(n):\n",
+"    return np.random.uniform(-1, 1, size=n) \n",
 "\n",
 "# Generate controls, covariates, treatments and outcomes\n",
 "X = np.random.uniform(0, 1, size=(n, n_x))\n",

@@ -155,8 +157,10 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"reg = lambda: RandomForestRegressor(min_samples_leaf=10)\n",
-"clf = lambda: RandomForestClassifier(min_samples_leaf=10)"
+"def reg():\n",
+"    return RandomForestRegressor(min_samples_leaf=10)\n",
+"def clf():\n",
+"    return RandomForestClassifier(min_samples_leaf=10)"
 ]
 },
 {

@@ -421,8 +425,10 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"reg = lambda: RandomForestRegressor(min_samples_leaf=10, random_state=123)\n",
-"clf = lambda: RandomForestClassifier(min_samples_leaf=10, random_state=123)"
+"def reg():\n",
+"    return RandomForestRegressor(min_samples_leaf=10, random_state=123)\n",
+"def clf():\n",
+"    return RandomForestClassifier(min_samples_leaf=10, random_state=123)"
 ]
 },
 {

@@ -83,7 +83,8 @@
 "p = 10\n",
 "W = np.random.uniform(size=(n, p))\n",
 "X = np.random.uniform(size=(n, 1))\n",
-"true_effect = lambda x: x[:, 0] ** 2\n",
+"def true_effect(x):\n",
+"    return x[:, 0] ** 2\n",
 "T = W[:, 0] + W[:, 1] ** 2 + np.random.uniform(-1, 1, size=n)\n",
 "Y = (\n",
 "    true_effect(X) * T\n",

@@ -158,12 +159,8 @@
 }
 ],
 "source": [
-"first_stage = lambda: GridSearchCV(\n",
-"    estimator=GradientBoostingRegressor(),\n",
-"    param_grid={\"max_depth\": [3, 5, None], \"n_estimators\": (50, 100, 200)},\n",
-"    cv=2,\n",
-"    n_jobs=-1,\n",
-")\n",
+"def first_stage():\n",
+"    return GridSearchCV(estimator=GradientBoostingRegressor(), param_grid={\"max_depth\": [3, 5, None], \"n_estimators\": (50, 100, 200)}, cv=2, n_jobs=-1)\n",
 "est = LinearDML(\n",
 "    model_y=first_stage(),\n",
 "    model_t=first_stage(),\n",

@@ -381,14 +378,8 @@
 }
 ],
 "source": [
-"first_stage = lambda: GridSearchCVList(\n",
-"    [Lasso(max_iter=10000), GradientBoostingRegressor()],\n",
-"    param_grid_list=[\n",
-"        {\"alpha\": [0.001, 0.01, 0.1, 1, 10]},\n",
-"        {\"max_depth\": [3, 5, None], \"n_estimators\": [50, 100, 200]},\n",
-"    ],\n",
-"    cv=2,\n",
-")"
+"def first_stage():\n",
+"    return GridSearchCVList([Lasso(max_iter=10000), GradientBoostingRegressor()], param_grid_list=[{\"alpha\": [0.001, 0.01, 0.1, 1, 10]}, {\"max_depth\": [3, 5, None], \"n_estimators\": [50, 100, 200]}], cv=2)"
 ]
 },
 {

@@ -397,7 +397,8 @@
 "outputs": [],
 "source": [
 "# Define underlying treatment effect function\n",
-"TE_fn = lambda X: np.hstack([5000 + 2 / 100 * X, 5 / 100 * X])\n",
+"def TE_fn(X):\n",
+"    return np.hstack([5000 + 2 / 100 * X, 5 / 100 * X])\n",
 "true_TE = TE_fn(X)\n",
 "\n",
 "# Define true coefficients for the three treatments\n",

File diff suppressed because one or more lines are too long

@@ -369,7 +369,8 @@
 "outputs": [],
 "source": [
 "# Define underlying treatment effect function \n",
-"TE_fn = lambda X: (0.2 + 0.3 * X['days_visited_free_pre'] - 0.2 * X['days_visited_hs_pre'] + X['os_type_osx']).values\n",
+"def TE_fn(X):\n",
+"    return (0.2 + 0.3 * X['days_visited_free_pre'] - 0.2 * X['days_visited_hs_pre'] + X['os_type_osx']).values\n",
 "true_TE = TE_fn(X_data)\n",
 "\n",
 "# Define the true coefficients to compare with\n",

File diff suppressed because one or more lines are too long

@@ -139,11 +139,13 @@
 "# Outcome support\n",
 "support_Y = np.random.choice(np.arange(n_w), size=support_size, replace=False)\n",
 "coefs_Y = np.random.uniform(0, 1, size=support_size)\n",
-"epsilon_sample = lambda n: np.random.uniform(-1, 1, size=n)\n",
+"def epsilon_sample(n):\n",
+"    return np.random.uniform(-1, 1, size=n)\n",
 "# Treatment support\n",
 "support_T = support_Y\n",
 "coefs_T = np.random.uniform(0, 1, size=support_size)\n",
-"eta_sample = lambda n: np.random.uniform(-1, 1, size=n)\n",
+"def eta_sample(n):\n",
+"    return np.random.uniform(-1, 1, size=n)\n",
 "\n",
 "# Generate controls, covariates, treatments and outcomes\n",
 "W = np.random.normal(0, 1, size=(n, n_w))\n",

@@ -769,11 +771,13 @@
 "# Outcome support\n",
 "support_Y = np.random.choice(range(n_w), size=support_size, replace=False)\n",
 "coefs_Y = np.random.uniform(0, 1, size=support_size)\n",
-"epsilon_sample = lambda n:np.random.uniform(-1, 1, size=n)\n",
+"def epsilon_sample(n):\n",
+"    return np.random.uniform(-1, 1, size=n)\n",
 "# Treatment support\n",
 "support_T = support_Y\n",
 "coefs_T = np.random.uniform(0, 1, size=support_size)\n",
-"eta_sample = lambda n: np.random.uniform(-1, 1, size=n) \n",
+"def eta_sample(n):\n",
+"    return np.random.uniform(-1, 1, size=n) \n",
 "\n",
 "# Generate controls, covariates, treatments and outcomes\n",
 "W = np.random.normal(0, 1, size=(n, n_w))\n",

@@ -1632,11 +1636,13 @@
 "# Outcome support\n",
 "support_Y = np.random.choice(np.arange(n_w), size=support_size, replace=False)\n",
 "coefs_Y = np.random.uniform(0, 1, size=support_size)\n",
-"epsilon_sample = lambda n: np.random.uniform(-1, 1, size=n)\n",
+"def epsilon_sample(n):\n",
+"    return np.random.uniform(-1, 1, size=n)\n",
 "# Treatment support\n",
 "support_T = support_Y\n",
 "coefs_T = np.random.uniform(0, 1, size=support_size)\n",
-"eta_sample = lambda n: np.random.uniform(-1, 1, size=n)\n",
+"def eta_sample(n):\n",
+"    return np.random.uniform(-1, 1, size=n)\n",
 "\n",
 "# Generate controls, covariates, treatments and outcomes\n",
 "W = np.random.normal(0, 1, size=(n, n_w))\n",

@@ -66,9 +66,12 @@
 "n = 2000\n",
 "p = 10\n",
 "X = np.random.normal(size=(n, p))\n",
-"true_propensity = lambda x: .4 + .2 * (x[:, 0] > 0)\n",
-"true_effect = lambda x: (x[:, 0] * (x[:, 0] > 0))\n",
-"true_conf = lambda x: x[:, 1] + np.clip(x[:, 2], - np.inf, 0)\n",
+"def true_propensity(x):\n",
+"    return 0.4 + 0.2 * (x[:, 0] > 0)\n",
+"def true_effect(x):\n",
+"    return x[:, 0] * (x[:, 0] > 0)\n",
+"def true_conf(x):\n",
+"    return x[:, 1] + np.clip(x[:, 2], -np.inf, 0)\n",
 "T = np.random.binomial(1, true_propensity(X))\n",
 "Y = true_effect(X) * T + true_conf(X) + np.random.normal(size=(n,))"
 ]

@@ -84,8 +84,8 @@
 "n_treatments = 1\n",
 "# true_te = lambda X: np.hstack([X[:, [0]]**2 + 1, np.ones((X.shape[0], n_treatments - 1))])\n",
 "# true_te = lambda X: np.hstack([X[:, [0]]>0, np.ones((X.shape[0], n_treatments - 1))])\n",
-"true_te = lambda X: np.hstack([(X[:, [0]]>0) * X[:, [0]],\n",
-"                               np.ones((X.shape[0], n_treatments - 1))*np.arange(1, n_treatments).reshape(1, -1)])\n",
+"def true_te(X):\n",
+"    return np.hstack([(X[:, [0]] > 0) * X[:, [0]], np.ones((X.shape[0], n_treatments - 1)) * np.arange(1, n_treatments).reshape(1, -1)])\n",
 "X = np.random.normal(0, 1, size=(n_samples, n_features))\n",
 "T = np.random.normal(0, 1, size=(n_samples, n_treatments))\n",
 "for t in range(n_treatments):\n",

@@ -378,7 +378,8 @@
 "n_features = 10\n",
 "n_treatments = 2\n",
 "# true_te = lambda X: np.hstack([X[:, [0]]**2 + 1, np.ones((X.shape[0], n_treatments - 1))])\n",
-"true_te = lambda X: np.hstack([X[:, [0]]>0, np.ones((X.shape[0], n_treatments - 1))])\n",
+"def true_te(X):\n",
+"    return np.hstack([X[:, [0]] > 0, np.ones((X.shape[0], n_treatments - 1))])\n",
 "# true_te = lambda X: np.hstack([(X[:, [0]]>0) * X[:, [0]],\n",
 "#                                np.ones((X.shape[0], n_treatments - 1))*np.arange(1, n_treatments).reshape(1, -1)])\n",
 "Z = np.random.normal(0, 1, size=(n_samples, n_treatments))\n",

@@ -616,7 +617,8 @@
 "n_samples = 2000\n",
 "n_features = 10\n",
 "n_outputs = 2\n",
-"true_te = lambda X: np.hstack([X[:, [0]]**2 + 1, np.ones((X.shape[0], n_outputs - 1))])\n",
+"def true_te(X):\n",
+"    return np.hstack([X[:, [0]] ** 2 + 1, np.ones((X.shape[0], n_outputs - 1))])\n",
 "# true_te = lambda X: np.hstack([X[:, [0]]>0, np.ones((X.shape[0], n_outputs - 1))])\n",
 "# true_te = lambda X: np.hstack([(X[:, [0]]>0) * X[:, [0]],\n",
 "#                                np.ones((X.shape[0], n_outputs - 1))*np.arange(1, n_outputs).reshape(1, -1)])\n",

@@ -808,8 +810,8 @@
 "n_outputs = 2\n",
 "# true_te = lambda X: np.hstack([X[:, [0]]**2 + 1, np.ones((X.shape[0], n_treatments - 1))])\n",
 "# true_te = lambda X: np.hstack([X[:, [0]]>0, np.ones((X.shape[0], n_treatments - 1))])\n",
-"true_te = lambda X: np.hstack([(X[:, [0]]>0) * X[:, [0]],\n",
-"                               np.ones((X.shape[0], n_treatments - 1))*np.arange(1, n_treatments).reshape(1, -1)])\n",
+"def true_te(X):\n",
+"    return np.hstack([(X[:, [0]] > 0) * X[:, [0]], np.ones((X.shape[0], n_treatments - 1)) * np.arange(1, n_treatments).reshape(1, -1)])\n",
 "X = np.random.normal(0, 1, size=(n_samples, n_features))\n",
 "W = np.random.normal(0, 1, size=(n_samples, n_features))\n",
 "T = np.random.normal(0, 1, size=(n_samples, n_treatments))\n",

@@ -1459,7 +1461,8 @@
 "n_samples = 2000\n",
 "n_features = 10\n",
 "n_treatments = 2\n",
-"true_te = lambda X: np.hstack([X[:, [0]]>0, np.ones((X.shape[0], n_treatments - 1))])\n",
+"def true_te(X):\n",
+"    return np.hstack([X[:, [0]] > 0, np.ones((X.shape[0], n_treatments - 1))])\n",
 "Z = np.random.normal(0, 1, size=(n_samples, n_treatments))\n",
 "X = np.random.normal(0, 1, size=(n_samples, n_features))\n",
 "U = np.random.normal(0, .2, size=(n_samples, 1))\n",

@@ -69,7 +69,8 @@
 "np.random.seed(123)\n",
 "n_samples = 5000\n",
 "n_features = 10\n",
-"true_te = lambda X: (X[:, 0]>0) * X[:, 0]\n",
+"def true_te(X):\n",
+"    return (X[:, 0] > 0) * X[:, 0]\n",
 "X = np.random.normal(0, 1, size=(n_samples, n_features))\n",
 "W = np.random.normal(0, 1, size=(n_samples, n_features))\n",
 "T = np.random.binomial(1, scipy.special.expit(X[:, 0]))\n",

@@ -293,8 +294,8 @@
 "n_features = 10\n",
 "n_treatments = 2\n",
 "n_outputs = 3\n",
-"true_te = lambda X: np.hstack([(X[:, [0]]>0) * X[:, [0]],\n",
-"                               np.ones((X.shape[0], n_treatments - 1))*np.arange(1, n_treatments).reshape(1, -1)])\n",
+"def true_te(X):\n",
+"    return np.hstack([(X[:, [0]] > 0) * X[:, [0]], np.ones((X.shape[0], n_treatments - 1)) * np.arange(1, n_treatments).reshape(1, -1)])\n",
 "X = np.random.normal(0, 1, size=(n_samples, n_features))\n",
 "W = np.random.normal(0, 1, size=(n_samples, n_features))\n",
 "T = np.random.normal(0, 1, size=(n_samples, n_treatments))\n",

File diff suppressed because one or more lines are too long

@@ -195,7 +195,8 @@
 }
 ],
 "source": [
-"func = lambda X: 10\n",
+"def func(X):\n",
+"    return 10\n",
 "n = 5000\n",
 "p = 10\n",
 "y, T, Z, X = dgp(n, p, func)"

@@ -241,8 +242,10 @@
 }
 ],
 "source": [
-"model = lambda: LinearRegression()\n",
-"model_clf = lambda: LogisticRegression()"
+"def model():\n",
+"    return LinearRegression()\n",
+"def model_clf():\n",
+"    return LogisticRegression()"
 ]
 },
 {

@@ -732,7 +735,8 @@
 }
 ],
 "source": [
-"func = lambda X: 10 * X[:, 0]"
+"def func(X):\n",
+"    return 10 * X[:, 0]"
 ]
 },
 {

@@ -155,7 +155,6 @@ ignore = [
     "E402", # Module level import not at top of file
     "E713", # Test for membership should be 'not in'
     "E722", # Do not use bare 'except'
-    "E731", # Do not assign a lambda expression, use a def
     "D100", # Missing docstring in public module
     "D101", # Missing docstring in public class
     "D102", # Missing docstring in public method
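
With "E731" dropped from the ignore list above, the linter will now enforce the rule on future changes. A small sketch of what the rule guards against (plain CPython behavior, names are illustrative): a def carries its own name in reprs and tracebacks, while an assigned lambda always reports as <lambda>.

square = lambda x: x * x      # the form E731 flags
def square_fn(x):
    return x * x              # the preferred spelling

print(square.__name__)        # prints '<lambda>'
print(square_fn.__name__)     # prints 'square_fn'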