* resolve format issues

* update log path and tensorboard path

* remove subprocess import

* fetch common utils from chenhui/dilatedcnn_windows

* update notebook

* remove the azureml-sdk explain extra and add the notebooks extra

* get updated CI yml files

* update kernel name

Former-commit-id: deac0cce96
This commit is contained in:
Chenhui Hu 2020-04-03 11:15:49 -04:00 committed by GitHub
Parent c2c17e4748
Commit c52fb1c83d
3 changed files with 17 additions and 10 deletions

View File

@@ -55,6 +55,13 @@
"%autoreload 2"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
">Note: If you run into any issue with installing and enabling the AzureML widgets below, please *uncomment* the first line in the following cell to manually install `azureml-widgets`."
]
},
{
"cell_type": "code",
"execution_count": 2,
@@ -81,6 +88,7 @@
],
"source": [
"# Install and enable AzureML widgets\n",
"#!pip -q install azureml-widgets==1.0.85\n",
"!jupyter nbextension install --py --user azureml.widgets\n",
"!jupyter nbextension enable --py --user azureml.widgets"
]
@@ -106,7 +114,6 @@
"import shutil\n",
"import azureml\n",
"import requests\n",
"import subprocess\n",
"import numpy as np\n",
"from azureml.core import (\n",
" Experiment,\n",
@@ -130,7 +137,7 @@
")\n",
"from azureml.core.webservice import AciWebservice\n",
"from azureml.core.model import Model, InferenceConfig\n",
"from fclib.common.utils import git_repo_path\n",
"from fclib.common.utils import git_repo_path, module_path\n",
"from fclib.azureml.azureml_utils import (\n",
" get_or_create_workspace,\n",
" get_or_create_amlcompute,\n",
@@ -280,8 +287,7 @@
"outputs": [],
"source": [
"# Get Python interpreter path\n",
"python_path = subprocess.check_output(\"which python\", shell=True)\n",
"python_path = python_path.decode(\"utf-8\")[:-1]\n",
"python_path = module_path(\"forecasting_env\", \"python\")\n",
"\n",
"# Configure local, user managed environment\n",
"run_config_user_managed = RunConfiguration()\n",

View File

@@ -7,13 +7,14 @@ import papermill as pm
import scrapbook as sb
ABS_TOL = 5.0
KERNEL = "forecasting_env"
@pytest.mark.integration
def test_lightgbm_quick_start(notebooks):
notebook_path = notebooks["lightgbm_quick_start"]
output_notebook_path = os.path.join(os.path.dirname(notebook_path), "output.ipynb")
pm.execute_notebook(notebook_path, output_notebook_path, kernel_name="forecast_cpu")
pm.execute_notebook(notebook_path, output_notebook_path, kernel_name=KERNEL)
nb = sb.read_notebook(output_notebook_path)
df = nb.scraps.dataframe
assert df.shape[0] == 1
@@ -26,7 +27,7 @@ def test_autoarima_quick_start(notebooks):
notebook_path = notebooks["autoarima_quick_start"]
output_notebook_path = os.path.join(os.path.dirname(notebook_path), "output.ipynb")
pm.execute_notebook(
notebook_path, output_notebook_path, kernel_name="forecast_cpu", parameters=dict(STORE_SUBSET=True),
notebook_path, output_notebook_path, kernel_name=KERNEL, parameters=dict(STORE_SUBSET=True),
)
nb = sb.read_notebook(output_notebook_path)
df = nb.scraps.dataframe
@@ -41,7 +42,7 @@ def test_lightgbm_multi_round(notebooks):
notebook_path = notebooks["lightgbm_multi_round"]
output_notebook_path = os.path.join(os.path.dirname(notebook_path), "output.ipynb")
pm.execute_notebook(
notebook_path, output_notebook_path, kernel_name="forecast_cpu", parameters=dict(N_SPLITS=1),
notebook_path, output_notebook_path, kernel_name=KERNEL, parameters=dict(N_SPLITS=1),
)
nb = sb.read_notebook(output_notebook_path)
df = nb.scraps.dataframe
@@ -55,7 +56,7 @@ def test_dilatedcnn_multi_round(notebooks):
notebook_path = notebooks["dilatedcnn_multi_round"]
output_notebook_path = os.path.join(os.path.dirname(notebook_path), "output.ipynb")
pm.execute_notebook(
notebook_path, output_notebook_path, kernel_name="forecast_cpu", parameters=dict(N_SPLITS=2),
notebook_path, output_notebook_path, kernel_name=KERNEL, parameters=dict(N_SPLITS=2),
)
nb = sb.read_notebook(output_notebook_path)
df = nb.scraps.dataframe
@@ -69,7 +70,7 @@ def test_autoarima_multi_round(notebooks):
notebook_path = notebooks["autoarima_multi_round"]
output_notebook_path = os.path.join(os.path.dirname(notebook_path), "output.ipynb")
pm.execute_notebook(
notebook_path, output_notebook_path, kernel_name="forecast_cpu", parameters=dict(N_SPLITS=2, STORE_SUBSET=True),
notebook_path, output_notebook_path, kernel_name=KERNEL, parameters=dict(N_SPLITS=2, STORE_SUBSET=True),
)
nb = sb.read_notebook(output_notebook_path)
df = nb.scraps.dataframe
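With the kernel name hoisted into the `KERNEL` constant, pointing all of the notebook tests at a different Jupyter kernel becomes a one-line change. To run only these papermill-driven checks, the `integration` marker they share can be selected; a small sketch, assuming the marker is registered in the repo's pytest configuration:

# Run only tests tagged with @pytest.mark.integration
# (assumes the "integration" marker is registered, e.g. in setup.cfg).
import pytest

if __name__ == "__main__":
    # Equivalent to running `pytest -m integration` from the shell
    raise SystemExit(pytest.main(["-m", "integration"]))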

View File

@@ -40,4 +40,4 @@ dependencies:
- nteract-scrapbook==0.3.1
- statsmodels>=0.11.1
- pmdarima>=1.1.1
- azureml-sdk[explain,automl]==1.0.85
- azureml-sdk[automl,notebooks]==1.0.85
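Swapping the `explain` extra for `notebooks` in the conda environment is what should make the AzureML widgets available without the manual `pip install` mentioned in the notebook note above (to my understanding, the `notebooks` extra pulls in `azureml-widgets`). A quick sanity check, as a sketch:

# Check that the widgets shipped with azureml-sdk[notebooks] are importable
try:
    import azureml.widgets  # provided by the azureml-widgets package
    print("azureml-widgets available")
except ImportError:
    # Fall back to the manual install suggested in the notebook note
    print("azureml-widgets missing; uncomment the pip install line in the notebook")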