YanZhangADS 2018-12-21 16:07:48 +00:00
Parent e6ee0baa69
Commit 6c362caa8a
4 changed files with 279 additions and 713 deletions

View file

@@ -252,25 +252,25 @@
},
{
"cell_type": "code",
"execution_count": 15,
"execution_count": null,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"(True, 'model_version', '2')"
]
},
"execution_count": 15,
"metadata": {},
"output_type": "execute_result"
}
],
"outputs": [],
"source": [
"model_version = str(model.version)\n",
"set_key(env_path, \"model_version\", model_version)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Clear GPU memory\n",
"from keras import backend as K\n",
"K.clear_session()"
]
},
{
"cell_type": "markdown",
"metadata": {},

View file

@@ -11,7 +11,7 @@
},
{
"cell_type": "code",
"execution_count": 1,
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
@@ -19,25 +19,40 @@
"from azureml.core.compute import AksCompute, ComputeTarget\n",
"from azureml.core.webservice import Webservice, AksWebservice\n",
"from azureml.core.image import Image\n",
"from azureml.core.model import Model"
"from azureml.core.model import Model\n",
"from dotenv import set_key, get_key, find_dotenv\n",
"import logging\n",
"from testing_utilities import img_url_to_json"
]
},
{
"cell_type": "code",
"execution_count": 2,
"execution_count": null,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"0.1.74\n"
]
}
],
"outputs": [],
"source": [
"import azureml.core\n",
"print(azureml.core.VERSION)"
"import keras\n",
"import tensorflow\n",
"print(\"Keras: \", keras.__version__)\n",
"print(\"Tensorflow: \", tensorflow.__version__)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"env_path = find_dotenv(raise_error_if_not_found=True)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"_MODEL_NAME = get_key(env_path, 'model_name')"
]
},
{
@@ -49,86 +64,112 @@
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Writing driver.py\n"
]
}
],
"source": [
"%%writefile driver.py\n",
"def init():\n",
" import tensorflow as tf\n",
" from resnet152 import ResNet152\n",
" from keras.preprocessing import image\n",
" from keras.applications.imagenet_utils import preprocess_input, decode_predictions\n",
"\n",
" import numpy as np\n",
" import timeit as t\n",
" import base64\n",
" import json\n",
" from PIL import Image, ImageOps\n",
" from io import BytesIO\n",
" import logging\n",
"\n",
" global model\n",
" model = ResNet152(weights='imagenet')\n",
" print('Model loaded')\n",
" \n",
"def run(inputString):\n",
" \n",
" import tensorflow as tf\n",
" from resnet152 import ResNet152\n",
" from keras.preprocessing import image\n",
" from keras.applications.imagenet_utils import preprocess_input, decode_predictions\n",
"\n",
" import numpy as np\n",
" import timeit as t\n",
" import base64\n",
" import json\n",
" from PIL import Image, ImageOps\n",
" from io import BytesIO\n",
" import logging \n",
" \n",
" model = ResNet152(weights='imagenet')\n",
" print('Model loaded')\n",
" \n",
" responses = []\n",
" base64Dict = json.loads(inputString)\n",
"\n",
" for k, v in base64Dict.items():\n",
" img_file_name, base64Img = k, v\n",
" decoded_img = base64.b64decode(base64Img)\n",
" img_buffer = BytesIO(decoded_img)\n",
" imageData = Image.open(img_buffer).convert(\"RGB\")\n",
" \n",
" # Evaluate the model using the input data\n",
" img = ImageOps.fit(imageData, (224,224), Image.ANTIALIAS)\n",
" img = np.array(img) # shape: (224, 224, 3)\n",
" \n",
" img = np.expand_dims(img, axis=0)\n",
" img = preprocess_input(img)\n",
" \n",
" preds = model.predict(img)\n",
" print('Predicted:', decode_predictions(preds, top=3))\n",
" resp = {img_file_name: str(decode_predictions(preds, top=3))}\n",
"\n",
" responses.append(resp)\n",
" return json.dumps(responses) "
]
},
{
"cell_type": "code",
"execution_count": 4,
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"%run driver.py"
"%%writefile driver.py\n",
"\n",
"import tensorflow as tf\n",
"from resnet152 import ResNet152\n",
"from keras.preprocessing import image\n",
"from keras.applications.imagenet_utils import preprocess_input, decode_predictions\n",
"\n",
"from azureml.core.model import Model\n",
"\n",
"import numpy as np\n",
"import timeit as t\n",
"import base64\n",
"import json\n",
"from PIL import Image, ImageOps\n",
"from io import BytesIO\n",
"import logging\n",
"\n",
"number_results = 3\n",
"logger = logging.getLogger(\"model_driver\")\n",
"\n",
"def _base64img_to_numpy(base64_img_string):\n",
" decoded_img = base64.b64decode(base64_img_string)\n",
" img_buffer = BytesIO(decoded_img)\n",
" imageData = Image.open(img_buffer).convert(\"RGB\")\n",
" img = ImageOps.fit(imageData, (224, 224), Image.ANTIALIAS)\n",
" img = image.img_to_array(img)\n",
" return img\n",
"\n",
"def create_scoring_func():\n",
" \"\"\" Initialize ResNet 152 Model \n",
" \"\"\" \n",
" start = t.default_timer()\n",
" # model = ResNet152(weights='imagenet')\n",
" model_name = 'resnet_model'\n",
" model_path=Model.get_model_path(model_name)\n",
" model = ResNet152()\n",
" model.load_weights(model_path)\n",
" end = t.default_timer()\n",
" \n",
" loadTimeMsg = \"Model loading time: {0} ms\".format(round((end-start)*1000, 2))\n",
" logger.info(loadTimeMsg)\n",
" \n",
" def call_model(img_array):\n",
" img_array = np.expand_dims(img_array, axis=0)\n",
" img_array = preprocess_input(img_array)\n",
" preds = model.predict(img_array)\n",
" preds = decode_predictions(preds, top=number_results)[0] \n",
" return preds\n",
" \n",
" return call_model \n",
"\n",
"\n",
"def tuple_float_to_str(x):\n",
" \"\"\"tuple x = ('n02127052', 'lynx', 0.9816483) convert the 3rd element type numpy.float32 to string\n",
" return: ('n02127052', 'lynx', '0.9816483')\n",
" \"\"\"\n",
" a= list(x)\n",
" a[2]= str(a[2])\n",
" b= tuple(a)\n",
" return b\n",
" \n",
"def get_model_api():\n",
" logger = logging.getLogger(\"model_driver\")\n",
" scoring_func = create_scoring_func()\n",
" \n",
" def process_and_score(inputString):\n",
" \"\"\" Classify the input using the loaded model\n",
" \"\"\"\n",
" start = t.default_timer()\n",
"\n",
" base64Dict = json.loads(inputString) \n",
" for k, v in base64Dict.items():\n",
" img_file_name, base64Img = k, v \n",
" img_array = _base64img_to_numpy(base64Img)\n",
" preds = scoring_func(img_array)\n",
" \n",
" # convert the 3rd element type of preds items from numpy.float32 to string\n",
" for i, s in enumerate(preds):\n",
" preds[i] = tuple_float_to_str(s)\n",
"\n",
" responses = {img_file_name: preds}\n",
" \n",
" end = t.default_timer()\n",
" \n",
" logger.info(\"Predictions: {0}\".format(responses))\n",
" logger.info(\"Predictions took {0} ms\".format(round((end-start)*1000, 2)))\n",
" return (responses, \"Computed in {0} ms\".format(round((end-start)*1000, 2)))\n",
" return process_and_score\n",
"\n",
"def version():\n",
" return tf.__version__\n",
"\n",
"def init():\n",
" \"\"\" Initialise the model and scoring function\n",
" \"\"\"\n",
" global process_and_score\n",
" process_and_score = get_model_api()\n",
" \n",
"def run(raw_data):\n",
" \"\"\" Make a prediction based on the data passed in using the preloaded model\n",
" \"\"\"\n",
" return process_and_score(json.dumps(json.loads(raw_data)['input']))"
]
},
{
@@ -141,41 +182,93 @@
},
{
"cell_type": "code",
"execution_count": 6,
"execution_count": null,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"Using TensorFlow backend.\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Model loaded\n",
"Predicted: [[('n02127052', 'lynx', 0.9816487), ('n02128385', 'leopard', 0.007744099), ('n02123159', 'tiger_cat', 0.0036861112)]]\n",
"[{\"220px-Lynx_lynx_poing.jpg\": \"[[('n02127052', 'lynx', 0.9816487), ('n02128385', 'leopard', 0.007744099), ('n02123159', 'tiger_cat', 0.0036861112)]]\"}]\n"
]
}
],
"outputs": [],
"source": [
"from io import BytesIO\n",
"from PIL import Image, ImageOps\n",
"import base64\n",
"import json\n",
"\n",
"img_path = '220px-Lynx_lynx_poing.jpg'\n",
"encoded = None\n",
"with open(img_path, 'rb') as file:\n",
" encoded = base64.b64encode(file.read())\n",
"img_dict = {img_path: encoded.decode('utf-8')}\n",
"body = json.dumps(img_dict)\n",
"\n",
"resp = run(body)\n",
"print(resp)"
"logging.basicConfig(level=logging.DEBUG)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"%run driver.py"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Let's load the workspace."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"ws = Workspace.from_config()\n",
"print(ws.name, ws.resource_group, ws.location, ws.subscription_id, sep=\"\\n\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"model_path = Model.get_model_path(_MODEL_NAME, _workspace=ws)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"IMAGEURL = \"https://upload.wikimedia.org/wikipedia/commons/thumb/6/68/Lynx_lynx_poing.jpg/220px-Lynx_lynx_poing.jpg\""
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Always make sure you don't have any lingering notebooks running.\n",
"init()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"jsonimg = img_url_to_json(IMAGEURL)\n",
"resp = run(jsonimg)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Clear GPU memory\n",
"from keras import backend as K\n",
"K.clear_session()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Next, we will [build a docker image with this modle driver and other supporting files](03__BuildImage.ipynb)."
]
}
],
@@ -195,7 +288,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.6"
"version": "3.6.7"
}
},
"nbformat": 4,

View file

@@ -252,25 +252,25 @@
},
{
"cell_type": "code",
"execution_count": 15,
"execution_count": null,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"(True, 'model_version', '2')"
]
},
"execution_count": 15,
"metadata": {},
"output_type": "execute_result"
}
],
"outputs": [],
"source": [
"model_version = str(model.version)\n",
"set_key(env_path, \"model_version\", model_version)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Clear GPU memory\n",
"from keras import backend as K\n",
"K.clear_session()"
]
},
{
"cell_type": "markdown",
"metadata": {},

View file

@@ -11,7 +11,7 @@
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
@@ -27,25 +27,9 @@
},
{
"cell_type": "code",
"execution_count": 2,
"execution_count": null,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"Using TensorFlow backend.\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Keras: 2.2.0\n",
"Tensorflow: 1.10.0\n"
]
}
],
"outputs": [],
"source": [
"import keras\n",
"import tensorflow\n",
@@ -55,47 +39,20 @@
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"env_path = find_dotenv(raise_error_if_not_found=True)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Let's load the workspace."
]
},
{
"cell_type": "code",
"execution_count": 4,
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"_MODEL_NAME = 'resnet_model'"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"'model_resnet_weights.h5'"
]
},
"execution_count": 13,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"model_path"
"_MODEL_NAME = get_key(env_path, 'model_name')"
]
},
{
@@ -105,246 +62,11 @@
"## Write and save driver script"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [],
"source": [
"import tensorflow as tf\n",
"from resnet152 import ResNet152\n",
"from keras.preprocessing import image\n",
"from keras.applications.imagenet_utils import preprocess_input, decode_predictions\n",
"\n",
"from azureml.core.model import Model\n",
"\n",
"import numpy as np\n",
"import timeit as t\n",
"import base64\n",
"import json\n",
"from PIL import Image, ImageOps\n",
"from io import BytesIO\n",
"\n",
"def _base64img_to_numpy(base64_img_string):\n",
" decoded_img = base64.b64decode(base64_img_string)\n",
" img_buffer = BytesIO(decoded_img)\n",
" imageData = Image.open(img_buffer).convert(\"RGB\")\n",
" img = ImageOps.fit(imageData, (224, 224), Image.ANTIALIAS)\n",
" img = image.img_to_array(img)\n",
" return img\n",
"\n",
"def call_model(img_array, model, number_results):\n",
" img_array = np.expand_dims(img_array, axis=0)\n",
" img_array = preprocess_input(img_array)\n",
" preds = model.predict(img_array)\n",
" preds = decode_predictions(preds, top=number_results)[0] \n",
" return preds\n",
"\n",
"\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"model = ResNet152(weights='imagenet')"
]
},
{
"cell_type": "code",
"execution_count": 99,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"check 1.....\n",
"<class 'list'>\n",
"[('n02127052', 'lynx', 0.9816483), ('n02128385', 'leopard', 0.0077441484), ('n02123159', 'tiger_cat', 0.003686138)]\n",
"check 2 ....\n"
]
}
],
"source": [
"IMAGEURL = \"https://upload.wikimedia.org/wikipedia/commons/thumb/6/68/Lynx_lynx_poing.jpg/220px-Lynx_lynx_poing.jpg\"\n",
"jsonimg = img_url_to_json(IMAGEURL)\n",
"inputString = json.dumps(json.loads(jsonimg)['input'])\n",
"\n",
"base64Dict = json.loads(inputString) \n",
"for k, v in base64Dict.items():\n",
" img_file_name, base64Img = k, v \n",
"img_array = _base64img_to_numpy(base64Img)\n",
"\n",
"preds = call_model(img_array, model,3)\n",
"print(\"check 1.....\")\n",
"print(type(preds))\n",
"print(preds)\n",
"print(\"check 2 ....\")\n",
"responses = {img_file_name: preds}"
]
},
{
"cell_type": "code",
"execution_count": 20,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"numpy.float32"
]
},
"execution_count": 20,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"type(preds[0][2])"
]
},
{
"cell_type": "code",
"execution_count": 100,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"[('n02127052', 'lynx', 0.9816483),\n",
" ('n02128385', 'leopard', 0.0077441484),\n",
" ('n02123159', 'tiger_cat', 0.003686138)]"
]
},
"execution_count": 100,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"preds"
]
},
{
"cell_type": "code",
"execution_count": 77,
"metadata": {},
"outputs": [],
"source": [
"def tuple_float_to_str(x):\n",
" a= list(x)\n",
" a[2]= str(a[2])\n",
" b= tuple(a)\n",
" return b"
]
},
{
"cell_type": "code",
"execution_count": 82,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"<class 'str'>\n"
]
}
],
"source": [
"n=preds[0]\n",
"m = tuple_float_to_str(n)\n",
"print(type(m[1]))"
]
},
{
"cell_type": "code",
"execution_count": 101,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[('n02127052', 'lynx', '0.9816483'), ('n02128385', 'leopard', '0.0077441484'), ('n02123159', 'tiger_cat', '0.003686138')]\n"
]
}
],
"source": [
"a = preds\n",
"for i, s in enumerate(a):\n",
" a[i] = tuple_float_to_str(s)\n",
"print(a)"
]
},
{
"cell_type": "code",
"execution_count": 98,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"str"
]
},
"execution_count": 98,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"type(a[0][2])"
]
},
{
"cell_type": "code",
"execution_count": 37,
"metadata": {},
"outputs": [],
"source": [
"preds_str = list(map(str, [x[0:3] for x in preds]))"
]
},
{
"cell_type": "code",
"execution_count": 38,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"[\"('n02127052', 'lynx', 0.9816483)\",\n",
" \"('n02128385', 'leopard', 0.0077441484)\",\n",
" \"('n02123159', 'tiger_cat', 0.003686138)\"]"
]
},
"execution_count": 38,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"preds_str"
]
},
{
"cell_type": "code",
"execution_count": 102,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Overwriting driver.py\n"
]
}
],
"source": [
"%%writefile driver.py\n",
"\n",
@@ -460,7 +182,7 @@
},
{
"cell_type": "code",
"execution_count": 6,
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
@@ -469,7 +191,7 @@
},
{
"cell_type": "code",
"execution_count": 7,
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
@@ -477,98 +199,17 @@
]
},
{
"cell_type": "code",
"execution_count": 8,
"cell_type": "markdown",
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"DEBUG:cli.azure.cli.core:Current cloud config:\n",
"AzureCloud\n",
"DEBUG:cli.azure.cli.core.util:attempting to read file /home/mylogin/.azure/accessTokens.json as utf-8-sig\n",
"DEBUG:adal-python:3e1f8a29-74f5-43f5-babe-db772eee2262 - Authority:Performing instance discovery: ...\n",
"DEBUG:adal-python:3e1f8a29-74f5-43f5-babe-db772eee2262 - Authority:Performing static instance discovery\n",
"DEBUG:adal-python:3e1f8a29-74f5-43f5-babe-db772eee2262 - Authority:Authority validated via static instance discovery\n",
"INFO:adal-python:3e1f8a29-74f5-43f5-babe-db772eee2262 - TokenRequest:Getting token from cache with refresh if necessary.\n",
"DEBUG:adal-python:3e1f8a29-74f5-43f5-babe-db772eee2262 - CacheDriver:finding with query keys: {'_clientId': '...', 'userId': '...'}\n",
"DEBUG:adal-python:3e1f8a29-74f5-43f5-babe-db772eee2262 - CacheDriver:Looking for potential cache entries: {'_clientId': '...', 'userId': '...'}\n",
"DEBUG:adal-python:3e1f8a29-74f5-43f5-babe-db772eee2262 - CacheDriver:Found 2 potential entries.\n",
"DEBUG:adal-python:3e1f8a29-74f5-43f5-babe-db772eee2262 - CacheDriver:Resource specific token found.\n",
"DEBUG:adal-python:3e1f8a29-74f5-43f5-babe-db772eee2262 - CacheDriver:Returning token from cache lookup, AccessTokenId: b'5Yp4aePv8wlOdT3DbuKlW6SWQkAdmB3i9YfdmTGXeZ8=', RefreshTokenId: b'8PFNusPOpR1LtpwD1CELDvT/Bphd26EJmfdAfj3G0Es='\n",
"INFO:adal-python:3e1f8a29-74f5-43f5-babe-db772eee2262 - CacheDriver:Cached token is expired at 2018-12-18 17:31:28.243609. Refreshing\n",
"DEBUG:adal-python:3e1f8a29-74f5-43f5-babe-db772eee2262 - TokenRequest:called to refresh a token from the cache\n",
"INFO:adal-python:3e1f8a29-74f5-43f5-babe-db772eee2262 - TokenRequest:Getting a new token from a refresh token\n",
"DEBUG:urllib3.connectionpool:Starting new HTTPS connection (1): login.microsoftonline.com:443\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Found the config file in: /data/home/mylogin/notebooks/yanzrepo/DistributedDeepLearning/AKSDeploymentTutorial_AML/Keras_Tensorflow/aml_config/config.json\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"DEBUG:urllib3.connectionpool:https://login.microsoftonline.com:443 \"POST /72f988bf-86f1-41af-91ab-2d7cd011db47/oauth2/token HTTP/1.1\" 200 3304\n",
"INFO:adal-python:3e1f8a29-74f5-43f5-babe-db772eee2262 - OAuth2Client:Get Token Server returned this correlation_id: 3e1f8a29-74f5-43f5-babe-db772eee2262\n",
"DEBUG:adal-python:3e1f8a29-74f5-43f5-babe-db772eee2262 - CacheDriver:Created new cache entry from refresh response.\n",
"DEBUG:adal-python:3e1f8a29-74f5-43f5-babe-db772eee2262 - CacheDriver:Removing entry.\n",
"DEBUG:adal-python:3e1f8a29-74f5-43f5-babe-db772eee2262 - CacheDriver:Adding entry AccessTokenId: b'4++Ojr5UOlnb/W1BfAzN6wNgMlDh4UpGKDzSwPkXnqM=', RefreshTokenId: b'y85yfXPddjM3gdr367feA0PQgfQvBbm8b+sK2JQYpsI='\n",
"DEBUG:adal-python:3e1f8a29-74f5-43f5-babe-db772eee2262 - CacheDriver:Updating 1 cached refresh tokens\n",
"DEBUG:adal-python:3e1f8a29-74f5-43f5-babe-db772eee2262 - CacheDriver:Remove many: 1\n",
"DEBUG:adal-python:3e1f8a29-74f5-43f5-babe-db772eee2262 - CacheDriver:Add many: 1\n",
"INFO:adal-python:3e1f8a29-74f5-43f5-babe-db772eee2262 - CacheDriver:Returning token refreshed after expiry.\n",
"DEBUG:cli.azure.cli.core:Current cloud config:\n",
"AzureCloud\n",
"DEBUG:cli.azure.cli.core:Current cloud config:\n",
"AzureCloud\n",
"DEBUG:adal-python:9569e5f8-01af-45f3-8d37-f00532fa3ceb - Authority:Performing instance discovery: ...\n",
"DEBUG:adal-python:9569e5f8-01af-45f3-8d37-f00532fa3ceb - Authority:Performing static instance discovery\n",
"DEBUG:adal-python:9569e5f8-01af-45f3-8d37-f00532fa3ceb - Authority:Authority validated via static instance discovery\n",
"INFO:adal-python:9569e5f8-01af-45f3-8d37-f00532fa3ceb - TokenRequest:Getting token from cache with refresh if necessary.\n",
"DEBUG:adal-python:9569e5f8-01af-45f3-8d37-f00532fa3ceb - CacheDriver:finding with query keys: {'_clientId': '...', 'userId': '...'}\n",
"DEBUG:adal-python:9569e5f8-01af-45f3-8d37-f00532fa3ceb - CacheDriver:Looking for potential cache entries: {'_clientId': '...', 'userId': '...'}\n",
"DEBUG:adal-python:9569e5f8-01af-45f3-8d37-f00532fa3ceb - CacheDriver:Found 2 potential entries.\n",
"DEBUG:adal-python:9569e5f8-01af-45f3-8d37-f00532fa3ceb - CacheDriver:Resource specific token found.\n",
"DEBUG:adal-python:9569e5f8-01af-45f3-8d37-f00532fa3ceb - CacheDriver:Returning token from cache lookup, AccessTokenId: b'4++Ojr5UOlnb/W1BfAzN6wNgMlDh4UpGKDzSwPkXnqM=', RefreshTokenId: b'y85yfXPddjM3gdr367feA0PQgfQvBbm8b+sK2JQYpsI='\n",
"DEBUG:cli.azure.cli.core:Current cloud config:\n",
"AzureCloud\n",
"DEBUG:adal-python:7227a363-36ec-4dba-b593-5b9c348f08b0 - Authority:Performing instance discovery: ...\n",
"DEBUG:adal-python:7227a363-36ec-4dba-b593-5b9c348f08b0 - Authority:Performing static instance discovery\n",
"DEBUG:adal-python:7227a363-36ec-4dba-b593-5b9c348f08b0 - Authority:Authority validated via static instance discovery\n",
"INFO:adal-python:7227a363-36ec-4dba-b593-5b9c348f08b0 - TokenRequest:Getting token from cache with refresh if necessary.\n",
"DEBUG:adal-python:7227a363-36ec-4dba-b593-5b9c348f08b0 - CacheDriver:finding with query keys: {'_clientId': '...', 'userId': '...'}\n",
"DEBUG:adal-python:7227a363-36ec-4dba-b593-5b9c348f08b0 - CacheDriver:Looking for potential cache entries: {'_clientId': '...', 'userId': '...'}\n",
"DEBUG:adal-python:7227a363-36ec-4dba-b593-5b9c348f08b0 - CacheDriver:Found 2 potential entries.\n",
"DEBUG:adal-python:7227a363-36ec-4dba-b593-5b9c348f08b0 - CacheDriver:Resource specific token found.\n",
"DEBUG:adal-python:7227a363-36ec-4dba-b593-5b9c348f08b0 - CacheDriver:Returning token from cache lookup, AccessTokenId: b'4++Ojr5UOlnb/W1BfAzN6wNgMlDh4UpGKDzSwPkXnqM=', RefreshTokenId: b'y85yfXPddjM3gdr367feA0PQgfQvBbm8b+sK2JQYpsI='\n",
"DEBUG:msrest.universal_http.requests:Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\n",
"DEBUG:msrest.universal_http.requests:Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\n",
"DEBUG:msrest.service_client:Accept header absent and forced to application/json\n",
"DEBUG:msrest.universal_http:Configuring redirects: allow=True, max=30\n",
"DEBUG:msrest.universal_http:Configuring request: timeout=100, verify=True, cert=None\n",
"DEBUG:msrest.universal_http:Configuring proxies: ''\n",
"DEBUG:msrest.universal_http:Evaluate proxies against ENV settings: True\n",
"DEBUG:urllib3.connectionpool:Starting new HTTPS connection (1): management.azure.com:443\n",
"DEBUG:urllib3.connectionpool:https://management.azure.com:443 \"GET /subscriptions/edf507a2-6235-46c5-b560-fd463ba2e771/resourceGroups/yanzamlworkspace/providers/Microsoft.MachineLearningServices/workspaces/yanzamlworkspace?api-version=2018-11-19 HTTP/1.1\" 200 None\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"yanzamlworkspace\n",
"yanzamlworkspace\n",
"eastus2\n",
"edf507a2-6235-46c5-b560-fd463ba2e771\n"
]
}
],
"source": [
"Let's load the workspace."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"ws = Workspace.from_config()\n",
"print(ws.name, ws.resource_group, ws.location, ws.subscription_id, sep=\"\\n\")"
@@ -576,165 +217,16 @@
},
{
"cell_type": "code",
"execution_count": 9,
"execution_count": null,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"DEBUG:azureml.core.run:Could not load run context Failed to load a submitted run, if outside of an execution context, use project.start_run to initialize an azureml.core.Run., switching offline: False\n",
"DEBUG:azureml.core.run:Could not load the run context and allow_offline set to False\n",
"DEBUG:azureml.core.model:RunEnvironmentException: Failed to load a submitted run, if outside of an execution context, use project.start_run to initialize an azureml.core.Run.\n",
"DEBUG:cli.azure.cli.core:Current cloud config:\n",
"AzureCloud\n",
"DEBUG:adal-python:04afbf0b-b2bc-40ca-935f-91115e07403b - Authority:Performing instance discovery: ...\n",
"DEBUG:adal-python:04afbf0b-b2bc-40ca-935f-91115e07403b - Authority:Performing static instance discovery\n",
"DEBUG:adal-python:04afbf0b-b2bc-40ca-935f-91115e07403b - Authority:Authority validated via static instance discovery\n",
"INFO:adal-python:04afbf0b-b2bc-40ca-935f-91115e07403b - TokenRequest:Getting token from cache with refresh if necessary.\n",
"DEBUG:adal-python:04afbf0b-b2bc-40ca-935f-91115e07403b - CacheDriver:finding with query keys: {'_clientId': '...', 'userId': '...'}\n",
"DEBUG:adal-python:04afbf0b-b2bc-40ca-935f-91115e07403b - CacheDriver:Looking for potential cache entries: {'_clientId': '...', 'userId': '...'}\n",
"DEBUG:adal-python:04afbf0b-b2bc-40ca-935f-91115e07403b - CacheDriver:Found 2 potential entries.\n",
"DEBUG:adal-python:04afbf0b-b2bc-40ca-935f-91115e07403b - CacheDriver:Resource specific token found.\n",
"DEBUG:adal-python:04afbf0b-b2bc-40ca-935f-91115e07403b - CacheDriver:Returning token from cache lookup, AccessTokenId: b'4++Ojr5UOlnb/W1BfAzN6wNgMlDh4UpGKDzSwPkXnqM=', RefreshTokenId: b'y85yfXPddjM3gdr367feA0PQgfQvBbm8b+sK2JQYpsI='\n",
"DEBUG:msrest.universal_http.requests:Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\n",
"DEBUG:msrest.universal_http.requests:Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\n",
"DEBUG:azureml._restclient.clientbase.WorkerPool:Found extra key word arguments: {'max_workers': None}\n",
"INFO:azureml._restclient.clientbase:Created a worker pool for first use\n",
"DEBUG:urllib3.connectionpool:Starting new HTTPS connection (1): eastus2.modelmanagement.azureml.net:443\n",
"DEBUG:urllib3.connectionpool:https://eastus2.modelmanagement.azureml.net:443 \"GET /api/subscriptions/edf507a2-6235-46c5-b560-fd463ba2e771/resourceGroups/yanzamlworkspace/providers/Microsoft.MachineLearningServices/workspaces/yanzamlworkspace/models?api-version=2018-11-19&orderBy=CreatedAtDesc&count=1&name=resnet_model HTTP/1.1\" 200 None\n",
"DEBUG:urllib3.connectionpool:Starting new HTTPS connection (1): eastus2.modelmanagement.azureml.net:443\n",
"DEBUG:urllib3.connectionpool:https://eastus2.modelmanagement.azureml.net:443 \"GET /api/subscriptions/edf507a2-6235-46c5-b560-fd463ba2e771/resourceGroups/yanzamlworkspace/providers/Microsoft.MachineLearningServices/workspaces/yanzamlworkspace/models?api-version=2018-11-19&orderBy=CreatedAtDesc&count=1&name=resnet_model&$skipToken=K1JJRDpoMkpyQUliMjBnaGQ1Z0lBQUFBQUFBPT0jUlQ6MSNUUkM6MSNSVEQ6eC9VeU1ERTRMVEV4TFRJM1ZERTFPak0wT2pNeExqRTNORGs1TWlzd01Eb3dNQT09I0ZQQzpBZ0VMRVFzR0FGMm1XWUIza2c4SUFGbVpRU1lFQVVpQUVCUUFMSUFSZ0NTT080a0NnTCtBazRDVWdMU0Npb0lSQmdDd2lkR0FVb0E9 HTTP/1.1\" 200 None\n",
"DEBUG:msrest.universal_http.requests:Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\n",
"DEBUG:msrest.universal_http.requests:Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\n",
"DEBUG:azureml.core.model:Found model version 2\n",
"DEBUG:msrest.universal_http.requests:Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\n",
"DEBUG:msrest.universal_http.requests:Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\n",
"DEBUG:msrest.service_client:Accept header absent and forced to application/json\n",
"DEBUG:cli.azure.cli.core:Current cloud config:\n",
"AzureCloud\n",
"DEBUG:adal-python:7d78a8bf-a075-4dcf-b372-5eac03904170 - Authority:Performing instance discovery: ...\n",
"DEBUG:adal-python:7d78a8bf-a075-4dcf-b372-5eac03904170 - Authority:Performing static instance discovery\n",
"DEBUG:adal-python:7d78a8bf-a075-4dcf-b372-5eac03904170 - Authority:Authority validated via static instance discovery\n",
"INFO:adal-python:7d78a8bf-a075-4dcf-b372-5eac03904170 - TokenRequest:Getting token from cache with refresh if necessary.\n",
"DEBUG:adal-python:7d78a8bf-a075-4dcf-b372-5eac03904170 - CacheDriver:finding with query keys: {'_clientId': '...', 'userId': '...'}\n",
"DEBUG:adal-python:7d78a8bf-a075-4dcf-b372-5eac03904170 - CacheDriver:Looking for potential cache entries: {'_clientId': '...', 'userId': '...'}\n",
"DEBUG:adal-python:7d78a8bf-a075-4dcf-b372-5eac03904170 - CacheDriver:Found 2 potential entries.\n",
"DEBUG:adal-python:7d78a8bf-a075-4dcf-b372-5eac03904170 - CacheDriver:Resource specific token found.\n",
"DEBUG:adal-python:7d78a8bf-a075-4dcf-b372-5eac03904170 - CacheDriver:Returning token from cache lookup, AccessTokenId: b'4++Ojr5UOlnb/W1BfAzN6wNgMlDh4UpGKDzSwPkXnqM=', RefreshTokenId: b'y85yfXPddjM3gdr367feA0PQgfQvBbm8b+sK2JQYpsI='\n",
"DEBUG:msrest.http_logger:Request URL: 'https://eastus2.modelmanagement.azureml.net/api/subscriptions/edf507a2-6235-46c5-b560-fd463ba2e771/resourceGroups/yanzamlworkspace/providers/Microsoft.MachineLearningServices/workspaces/yanzamlworkspace/assets/faf941d054554c89a57df701b07c2039?api-version=2018-03-01-preview'\n",
"DEBUG:msrest.http_logger:Request method: 'GET'\n",
"DEBUG:msrest.http_logger:Request headers:\n",
"DEBUG:msrest.http_logger: 'Accept': 'application/json'\n",
"DEBUG:msrest.http_logger: 'Content-Type': 'application/json; charset=utf-8'\n",
"DEBUG:msrest.http_logger: 'User-Agent': 'python/3.6.7 (Linux-4.15.0-1023-azure-x86_64-with-debian-stretch-sid) msrest/0.6.2 azureml._restclient/core.1.0.2'\n",
"DEBUG:msrest.http_logger:Request body:\n",
"DEBUG:msrest.http_logger:None\n",
"DEBUG:msrest.universal_http:Configuring redirects: allow=True, max=30\n",
"DEBUG:msrest.universal_http:Configuring request: timeout=100, verify=True, cert=None\n",
"DEBUG:msrest.universal_http:Configuring proxies: ''\n",
"DEBUG:msrest.universal_http:Evaluate proxies against ENV settings: True\n",
"DEBUG:urllib3.connectionpool:Starting new HTTPS connection (1): eastus2.modelmanagement.azureml.net:443\n",
"DEBUG:urllib3.connectionpool:https://eastus2.modelmanagement.azureml.net:443 \"GET /api/subscriptions/edf507a2-6235-46c5-b560-fd463ba2e771/resourceGroups/yanzamlworkspace/providers/Microsoft.MachineLearningServices/workspaces/yanzamlworkspace/assets/faf941d054554c89a57df701b07c2039?api-version=2018-03-01-preview HTTP/1.1\" 200 None\n",
"DEBUG:msrest.http_logger:Response status: 200\n",
"DEBUG:msrest.http_logger:Response headers:\n",
"DEBUG:msrest.http_logger: 'Date': 'Tue, 18 Dec 2018 18:07:25 GMT'\n",
"DEBUG:msrest.http_logger: 'Content-Type': 'application/json; charset=utf-8'\n",
"DEBUG:msrest.http_logger: 'Transfer-Encoding': 'chunked'\n",
"DEBUG:msrest.http_logger: 'Connection': 'keep-alive'\n",
"DEBUG:msrest.http_logger: 'api-supported-versions': '2018-03-01-preview, 2018-11-19'\n",
"DEBUG:msrest.http_logger: 'x-ms-client-request-id': 'd00238067df743aba4af3bd937c77de2'\n",
"DEBUG:msrest.http_logger: 'x-ms-client-session-id': ''\n",
"DEBUG:msrest.http_logger: 'Strict-Transport-Security': 'max-age=15724800; includeSubDomains; preload'\n",
"DEBUG:msrest.http_logger: 'X-Content-Type-Options': 'nosniff'\n",
"DEBUG:msrest.http_logger: 'Content-Encoding': 'gzip'\n",
"DEBUG:msrest.http_logger:Response content:\n",
"DEBUG:msrest.http_logger:{\"id\":\"faf941d054554c89a57df701b07c2039\",\"name\":\"resnet_model\",\"description\":\"resnet_model saved during run None in project None\",\"artifacts\":[{\"id\":null,\"prefix\":\"LocalUpload/181210T175931-c14a35b2/resnet_model.tar.gz\"}],\"tags\":null,\"kvTags\":{},\"properties\":{},\"runid\":null,\"projectid\":null,\"meta\":null,\"createdTime\":\"2018-12-10T17:59:37.0837561Z\"}\n",
"DEBUG:msrest.universal_http.requests:Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\n",
"DEBUG:msrest.universal_http.requests:Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\n",
"DEBUG:azureml.core.model:Asset has artifact {'additional_properties': {}, 'id': None, 'prefix': 'LocalUpload/181210T175931-c14a35b2/resnet_model.tar.gz'}\n",
"DEBUG:azureml.core.model:Artifact has prefix id LocalUpload/181210T175931-c14a35b2/resnet_model.tar.gz\n",
"DEBUG:azureml.ArtifactsClient:Fetching files for prefix in LocalUpload, 181210T175931-c14a35b2, resnet_model.tar.gz\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"DEBUG:azureml._restclient.clientbase.WorkerPool.CreateFutureFunc: _execute_with_base_arguments:[START]\n",
"DEBUG:msrest.service_client:Accept header absent and forced to application/json\n",
"DEBUG:azureml._restclient.clientbase.WorkerPool.CreateFutureFunc: _execute_with_base_arguments:[STOP]\n",
"DEBUG:msrest.universal_http.requests:Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\n",
"DEBUG:azureml.ArtifactsClient.list_sas_by_prefix:Using basic handler - no exception handling\n",
"DEBUG:azureml.ArtifactsClient.list_sas_by_prefix.WaitingTask:[START]\n",
"DEBUG:cli.azure.cli.core:Current cloud config:\n",
"AzureCloud\n",
"DEBUG:adal-python:bb575216-752c-4bf6-829c-ce186798c44e - Authority:Performing instance discovery: ...\n",
"DEBUG:adal-python:bb575216-752c-4bf6-829c-ce186798c44e - Authority:Performing static instance discovery\n",
"DEBUG:adal-python:bb575216-752c-4bf6-829c-ce186798c44e - Authority:Authority validated via static instance discovery\n",
"INFO:adal-python:bb575216-752c-4bf6-829c-ce186798c44e - TokenRequest:Getting token from cache with refresh if necessary.\n",
"DEBUG:adal-python:bb575216-752c-4bf6-829c-ce186798c44e - CacheDriver:finding with query keys: {'_clientId': '...', 'userId': '...'}\n",
"DEBUG:adal-python:bb575216-752c-4bf6-829c-ce186798c44e - CacheDriver:Looking for potential cache entries: {'_clientId': '...', 'userId': '...'}\n",
"DEBUG:adal-python:bb575216-752c-4bf6-829c-ce186798c44e - CacheDriver:Found 2 potential entries.\n",
"DEBUG:adal-python:bb575216-752c-4bf6-829c-ce186798c44e - CacheDriver:Resource specific token found.\n",
"DEBUG:adal-python:bb575216-752c-4bf6-829c-ce186798c44e - CacheDriver:Returning token from cache lookup, AccessTokenId: b'4++Ojr5UOlnb/W1BfAzN6wNgMlDh4UpGKDzSwPkXnqM=', RefreshTokenId: b'y85yfXPddjM3gdr367feA0PQgfQvBbm8b+sK2JQYpsI='\n",
"DEBUG:msrest.http_logger:Request URL: 'https://eastus2.experiments.azureml.net/artifact/v1.0/subscriptions/edf507a2-6235-46c5-b560-fd463ba2e771/resourceGroups/yanzamlworkspace/providers/Microsoft.MachineLearningServices/workspaces/yanzamlworkspace/artifacts/prefix/contentinfo/LocalUpload/181210T175931-c14a35b2/resnet_model.tar.gz'\n",
"DEBUG:msrest.http_logger:Request method: 'GET'\n",
"DEBUG:msrest.http_logger:Request headers:\n",
"DEBUG:msrest.http_logger: 'Accept': 'application/json'\n",
"DEBUG:msrest.http_logger: 'Content-Type': 'application/json; charset=utf-8'\n",
"DEBUG:msrest.http_logger: 'User-Agent': 'python/3.6.7 (Linux-4.15.0-1023-azure-x86_64-with-debian-stretch-sid) msrest/0.6.2 azureml._restclient/core.1.0.2'\n",
"DEBUG:msrest.http_logger:Request body:\n",
"DEBUG:msrest.http_logger:None\n",
"DEBUG:msrest.universal_http:Configuring redirects: allow=True, max=30\n",
"DEBUG:msrest.universal_http:Configuring request: timeout=100, verify=True, cert=None\n",
"DEBUG:msrest.universal_http:Configuring proxies: ''\n",
"DEBUG:msrest.universal_http:Evaluate proxies against ENV settings: True\n",
"DEBUG:urllib3.connectionpool:Starting new HTTPS connection (1): eastus2.experiments.azureml.net:443\n",
"DEBUG:urllib3.connectionpool:https://eastus2.experiments.azureml.net:443 \"GET /artifact/v1.0/subscriptions/edf507a2-6235-46c5-b560-fd463ba2e771/resourceGroups/yanzamlworkspace/providers/Microsoft.MachineLearningServices/workspaces/yanzamlworkspace/artifacts/prefix/contentinfo/LocalUpload/181210T175931-c14a35b2/resnet_model.tar.gz HTTP/1.1\" 200 None\n",
"DEBUG:msrest.http_logger:Response status: 200\n",
"DEBUG:msrest.http_logger:Response headers:\n",
"DEBUG:msrest.http_logger: 'Date': 'Tue, 18 Dec 2018 18:07:25 GMT'\n",
"DEBUG:msrest.http_logger: 'Content-Type': 'application/json; charset=utf-8'\n",
"DEBUG:msrest.http_logger: 'Transfer-Encoding': 'chunked'\n",
"DEBUG:msrest.http_logger: 'Connection': 'keep-alive'\n",
"DEBUG:msrest.http_logger: 'Request-Context': 'appId=cid-v1:2d2e8e63-272e-4b3c-8598-4ee570a0e70d'\n",
"DEBUG:msrest.http_logger: 'x-ms-client-request-id': '41d3c9294171489ca254f13bd4a70dbf'\n",
"DEBUG:msrest.http_logger: 'x-ms-client-session-id': ''\n",
"DEBUG:msrest.http_logger: 'Strict-Transport-Security': 'max-age=15724800; includeSubDomains; preload'\n",
"DEBUG:msrest.http_logger: 'X-Content-Type-Options': 'nosniff'\n",
"DEBUG:msrest.http_logger: 'Content-Encoding': 'gzip'\n",
"DEBUG:msrest.http_logger:Response content:\n",
"DEBUG:msrest.http_logger:{\n",
" \"value\": [\n",
" {\n",
" \"contentUri\": \"https://yanzamlwstorageyotvwlmq.blob.core.windows.net/azureml/LocalUpload/181210T175931-c14a35b2/resnet_model.tar.gz?sv=2018-03-28&sr=b&sig=EFDXCi1Jzt5VmRRjvxnQ9HwrBxmgG084NGXE954nMxc%3D&st=2018-12-18T17%3A57%3A25Z&se=2018-12-19T02%3A07%3A25Z&sp=r\",\n",
" \"origin\": \"LocalUpload\",\n",
" \"container\": \"181210T175931-c14a35b2\",\n",
" \"path\": \"resnet_model.tar.gz\"\n",
" }\n",
" ],\n",
" \"continuationToken\": null,\n",
" \"count\": null\n",
"}\n",
"DEBUG:azureml.ArtifactsClient.list_sas_by_prefix.WaitingTask:[STOP]\n",
"DEBUG:azureml.core.model:prefix id LocalUpload/181210T175931-c14a35b2/resnet_model.tar.gz has path resnet_model.tar.gz\n",
"DEBUG:azureml.core.model:prefix to strip from path resnet_model.tar.gz is \n",
"DEBUG:azureml.core.model:sas_to_download_path map is OrderedDict([('https://yanzamlwstorageyotvwlmq.blob.core.windows.net/azureml/LocalUpload/181210T175931-c14a35b2/resnet_model.tar.gz?sv=2018-03-28&sr=b&sig=EFDXCi1Jzt5VmRRjvxnQ9HwrBxmgG084NGXE954nMxc%3D&st=2018-12-18T17%3A57%3A25Z&se=2018-12-19T02%3A07%3A25Z&sp=r', 'resnet_model.tar.gz')])\n",
"DEBUG:azureml._file_utils.file_utils:downloading file to azureml-models/resnet_model/2/resnet_model.tar.gz, with max_retries: 0, stream: True, and protocol: https\n",
"DEBUG:urllib3.connectionpool:Starting new HTTPS connection (1): yanzamlwstorageyotvwlmq.blob.core.windows.net:443\n",
"DEBUG:urllib3.connectionpool:https://yanzamlwstorageyotvwlmq.blob.core.windows.net:443 \"GET /azureml/LocalUpload/181210T175931-c14a35b2/resnet_model.tar.gz?sv=2018-03-28&sr=b&sig=EFDXCi1Jzt5VmRRjvxnQ9HwrBxmgG084NGXE954nMxc%3D&st=2018-12-18T17%3A57%3A25Z&se=2018-12-19T02%3A07%3A25Z&sp=r HTTP/1.1\" 200 224608891\n",
"DEBUG:azureml.core.model:Unpacking model azureml-models/resnet_model/2/resnet_model.tar.gz\n"
]
}
],
"outputs": [],
"source": [
"model_path = Model.get_model_path(_MODEL_NAME, _workspace=ws)"
]
},
{
"cell_type": "code",
"execution_count": 10,
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
@@ -745,20 +237,9 @@
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"DEBUG:azureml.core.run:Could not load run context Failed to load a submitted run, if outside of an execution context, use project.start_run to initialize an azureml.core.Run., switching offline: False\n",
"DEBUG:azureml.core.run:Could not load the run context and allow_offline set to False\n",
"DEBUG:azureml.core.model:RunEnvironmentException: Failed to load a submitted run, if outside of an execution context, use project.start_run to initialize an azureml.core.Run.\n",
"DEBUG:azureml.core.model:version is None. Latest version is 2\n",
"DEBUG:azureml.core.model:Found model path at azureml-models/resnet_model/2/model_resnet_weights.h5\n"
]
}
],
"outputs": [],
"source": [
"# Always make sure you don't have any lingering notebooks running.\n",
"init()"
]
},
@@ -772,31 +253,23 @@
"resp = run(jsonimg)"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"jsonimg = img_url_to_json(IMAGEURL)"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [],
"source": [
"with open(\"jsonimg\", \"w\") as f:\n",
" f.write(jsonimg)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
"source": [
"# Clear GPU memory\n",
"from keras import backend as K\n",
"K.clear_session()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Next, we will [build a docker image with this modle driver and other supporting files](03__BuildImage.ipynb)."
]
}
],
"metadata": {