cleanup
This commit is contained in:
Parent: f7c3331551
Commit: 1ee8c2156d
@@ -1,98 +0,0 @@
from farm.data_handler.data_silo import DataSilo
from farm.data_handler.processor import NERProcessor
from farm.modeling.optimization import initialize_optimizer
from farm.infer import Inferencer
from farm.modeling.adaptive_model import AdaptiveModel
from farm.modeling.language_model import LanguageModel
from farm.modeling.prediction_head import TokenClassificationHead
from farm.modeling.tokenization import Tokenizer
from farm.train import Trainer
from farm.utils import set_all_seeds, MLFlowLogger, initialize_device_settings

# Custom functions
import sys
sys.path.append('./code')
import helper as he
import data as dt

# Load custom
logger = he.get_logger(location=__name__)
dt = dt.Data()

##########################
########## Settings
##########################
set_all_seeds(seed=42)
device, n_gpu = initialize_device_settings(use_cuda=True)
n_epochs = 1
batch_size = 32
evaluate_every = 100
lang_model = "bert-base-german-cased"

# 1. Create a tokenizer
tokenizer = Tokenizer.load(
    pretrained_model_name_or_path=lang_model,
    do_lower_case=False)

# 2. Create a DataProcessor that handles all the conversion from raw text into a pytorch Dataset
ner_labels = ["[PAD]", "X", "O", "B-MISC", "I-MISC", "B-PER", "I-PER", "B-ORG", "I-ORG", "B-LOC", "I-LOC", "B-OTH", "I-OTH"]

processor = NERProcessor(
    tokenizer=tokenizer, max_seq_len=128, data_dir="../data/conll03-de", metric="seq_f1", label_list=ner_labels
)

# 3. Create a DataSilo that loads several datasets (train/dev/test), provides DataLoaders for them and calculates a few descriptive statistics of our datasets
data_silo = DataSilo(processor=processor, batch_size=batch_size)

# 4. Create an AdaptiveModel
# a) which consists of a pretrained language model as a basis
language_model = LanguageModel.load(lang_model)
# b) and a prediction head on top that is suited for our task => NER
prediction_head = TokenClassificationHead(task_name="ner",
                                          layer_dims=[768, len(processor.tasks["ner"]["label_list"])])

model = AdaptiveModel(
    language_model=language_model,
    prediction_heads=[prediction_head],
    embeds_dropout_prob=0.1,
    lm_output_types=["per_token"],
    device=device,
)

# 5. Create an optimizer
model, optimizer, lr_schedule = initialize_optimizer(
    model=model,
    learning_rate=2e-5,
    n_batches=len(data_silo.loaders["train"]),
    n_epochs=n_epochs,
    device=device,
)

# 6. Feed everything to the Trainer, which takes care of growing our model into a powerful plant and evaluates it from time to time
trainer = Trainer(
    optimizer=optimizer,
    data_silo=data_silo,
    epochs=n_epochs,
    n_gpu=n_gpu,
    lr_schedule=lr_schedule,
    evaluate_every=evaluate_every,
    device=device,
)

# 7. Let it grow
model = trainer.train(model)

# 8. Hooray! You have a model. Store it:
save_dir = "saved_models/bert-german-ner-tutorial"
model.save(save_dir)
processor.save(save_dir)


# 9. Load it & harvest your fruits (Inference)
basic_texts = [
    {"text": "Schartau sagte dem Tagesspiegel, dass Fischer ein Idiot sei"},
    {"text": "Martin Müller spielt Handball in Berlin"},
]
model = Inferencer.load(save_dir)
result = model.inference_from_dicts(dicts=basic_texts)
print(result)
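The prediction head in step 4 sizes its output layer from the label list defined in step 2, so its dimensions follow directly from the thirteen NER labels. A minimal check, assuming the label list exactly as written above:

ner_labels = ["[PAD]", "X", "O", "B-MISC", "I-MISC", "B-PER", "I-PER", "B-ORG", "I-ORG", "B-LOC", "I-LOC", "B-OTH", "I-OTH"]
assert len(ner_labels) == 13
# TokenClassificationHead therefore maps 768-dimensional BERT token vectors to 13 label logits,
# i.e. layer_dims == [768, 13].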
deploy/cd.py
@@ -1,75 +0,0 @@
#MAIN for deployment
##PARAMS
# update assets (boolean)
# target (aci, aks,...)


############################################
##### Task 1
############################################

# ## Experiment
# experiment_name = run_type_str + "-hyper"
# exp = Experiment(workspace = ws, name = experiment_name)
# ## Parameters
# ### Fixed parameters
# script_params = {
#     '--task' : 1,
#     '--run_version': run_version,
#     '--run_type': run_type,
#     '--per_gpu_train_batch_size' : 64,
#     '--max_seq_length' : 128,
#     '--num_train_epochs' : 4,
#     '--do_train' : ''
# }
# ### Task parameters
# script_params_task1 = {
#     '--evaluate_during_training': '',
#     '--eval_all_checkpoints': '',
#     '--cleanup': ''
# }
# script_params_task = {**script_params, **script_params_task1}
# ### Hyperparameter params
# param_sampling = BayesianParameterSampling( {
#     '--learning_rate' : choice(2e-5, 4e-5, 5e-5, 6e-5, 8e-5)
# })
# ## Prepare task image
# est_task1 = PyTorch(source_directory = script_folder,
#                     compute_target = compute_target,
#                     script_params = script_params_task,
#                     entry_script = 'code/train.py',
#                     pip_packages = pip_packages,
#                     use_gpu = True)
# ## Prepare HyperDrive Config
# hdc = HyperDriveConfig(estimator=est_task1,
#                        hyperparameter_sampling = param_sampling,
#                        policy = None, # NOTE: not possible for bayesian
#                        primary_metric_name = 'eval_f1',
#                        primary_metric_goal = PrimaryMetricGoal.MAXIMIZE,
#                        max_total_runs = 20,
#                        max_concurrent_runs = 1)
# ## Run hyperparameter tuning
# hyperdrive_run = exp.submit(config=hdc)
# hyperdrive_run.wait_for_completion(show_output = False)
# ## Get Results
# best_run = hyperdrive_run.get_best_run_by_primary_metric()
# ## Experiment
# experiment_name = run_type_str + "-train"
# exp = Experiment(workspace = ws, name = experiment_name)
# # Parameters determined by hyperparams
# script_params_hyper = {
#     '--do_upload' : '',
#     '--do_eval': '',
#     '--learning_rate' : get_best_argument(best_run.get_details(), 'learning_rate')
# }
# script_params_best = {**script_params, **script_params_hyper}
# script_params_best
# est_best = PyTorch(source_directory = script_folder,
#                    compute_target = compute_target,
#                    script_params = script_params_best,
#                    entry_script = 'code/train.py',
#                    pip_packages = pip_packages,
#                    use_gpu = True)
# ## Run single
# run = exp.submit(est_best)
# run.wait_for_completion(show_output = False)
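The PARAMS block at the top of deploy/cd.py only names the intended inputs, an "update assets" boolean and a deployment "target" such as aci or aks, without showing how they would be read. A minimal sketch of how such a command-line interface could be parsed is given below; the flag names, defaults, and print statements are illustrative assumptions, not part of the original script.

# Hypothetical CLI parsing for the PARAMS described above (flag names assumed).
import argparse

parser = argparse.ArgumentParser(description='Deployment entry point (sketch)')
parser.add_argument('--update_assets', action='store_true',
                    help='re-upload model assets before deploying (boolean)')
parser.add_argument('--target', default='aci', choices=['aci', 'aks'],
                    help='deployment target')
args = parser.parse_args()

if args.update_assets:
    print('Would upload assets before deployment')
print(f'Would deploy to target: {args.target}')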
@@ -1,5 +0,0 @@
#create ml workspace

# Create RG

# Create Cluster
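This stub only lists the intended provisioning steps (resource group, workspace, compute cluster) without code. A minimal sketch using the Azure ML Python SDK is shown below; the workspace and resource group names are taken from the deployment notebook further down, while the subscription placeholder, region, VM size, and cluster name are assumptions for illustration.

# Hypothetical provisioning sketch with the Azure ML SDK (region, VM size, and cluster name assumed).
from azureml.core import Workspace
from azureml.core.compute import AmlCompute, ComputeTarget

# Create the resource group (if missing) together with the workspace.
ws = Workspace.create(name='nlp-ml',
                      subscription_id='<subscription-id>',
                      resource_group='nlp',
                      create_resource_group=True,
                      location='westeurope')

# Create a GPU training cluster that scales down to zero nodes when idle.
compute_config = AmlCompute.provisioning_configuration(vm_size='STANDARD_NC6',
                                                       min_nodes=0,
                                                       max_nodes=2)
compute_target = ComputeTarget.create(ws, 'gpu-cluster', compute_config)
compute_target.wait_for_completion(show_output=True)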
@@ -1,376 +0,0 @@
# Deleted Jupyter notebook (kernel "nlp", Python 3.6.9, nbformat 4); code cells below.

# ---- Cell 1 ----
import os
import zipfile
import json

from azureml.core.authentication import InteractiveLoginAuthentication, MsiAuthentication
from azureml.core import Workspace
from azureml.core import Model
from azureml.core.resource_configuration import ResourceConfiguration
from azureml.core.webservice import Webservice, AciWebservice
from azureml.core import Environment
from azureml.core.conda_dependencies import CondaDependencies
from azureml.core.model import InferenceConfig
from azureml.exceptions import WebserviceException

# ---- Cell 2 ----
os.chdir('..')

# ---- Cell 3 ----
# Custom functions
import sys
sys.path.append('./code')
import helper as he
import data as dt
import custom as cu

# ---- Cell 4 ----
"""
PREPARE FOR DEPLOYMENT

NOTE: set language in custom.py before running

Example (in the command line):
> cd to root dir
> conda activate nlp
> python code/aml_deploy.py

TODO:
- fetch flair model if not found
"""

logger = he.get_logger(location=__name__)

language = 'en'
env = 'test'
version = '0.3'
upload = False

##############################
## CONNECT TO WORKSPACE
##############################

# try:
#     auth = MsiAuthentication()
# except Exception as e:
#     logger.warn(e)
auth = InteractiveLoginAuthentication(tenant_id="72f988bf-86f1-41af-91ab-2d7cd011db47")

ws = Workspace.get(name='nlp-ml',
                   subscription_id='50324bce-875f-4a7b-9d3c-0e33679f5d72',
                   resource_group='nlp',
                   auth=auth)

# ws = Workspace.from_config()
logger.warn(f'[INFO] Loaded Workspace {ws.name} ({ws.resource_group})')

# NOTE:
# Change language setting in custom.py before deployment!

model_name = f'nlp_{language}_{env}'
if upload:
    logger.warn(f'[INFO] Uploading model assets -> {model_name}')

    # Zip assets
    dt_assets = dt.Data()
    with zipfile.ZipFile(dt_assets.fn_lookup['fn_asset'], 'w') as z:
        for f in cu.params.get('assets'):
            fp = dt_assets.fn_lookup[f]
            if '.' in fp:
                _new_fn = fp.split('/')[-1]
                z.write(fp, arcname=_new_fn)
                print(f'Zipped : {fp} -> {_new_fn}')
            else:
                # Iterate over all the files in the directory
                for folderName, subfolders, filenames in os.walk(fp):
                    for filename in filenames:
                        filePath = os.path.join(folderName, filename)
                        _new_fn = folderName.split('/')[-1] + '/' + filename
                        z.write(filePath, arcname=_new_fn)
                        print(f'Zipped : {filePath} -> {_new_fn}')

    # Upload assets
    model = Model.register(workspace=ws,
                           model_name=model_name,                       # Name of the registered model in your workspace.
                           model_path=dt_assets.fn_lookup['fn_asset'],  # Local file to upload and register as a model.
                           # resource_configuration=ResourceConfiguration(cpu=2, memory_in_gb=4),
                           description='NLP inference assets',
                           tags={'models': 'classification, ner, qa',
                                 'version': version,
                                 'language': language,
                                 'environment': env})
else:
    logger.warn(f'[INFO] Using existing model -> {model_name}')
    model = Model(ws, model_name)

logger.warn(f'[INFO] \t Model Name: {model.name}')
logger.warn(f'[INFO] \t Model Version: {model.version}')

##############################
## DEPLOY COMPUTE
##############################
aci_config = AciWebservice.deploy_configuration(cpu_cores=2, memory_gb=4)  # , auth_enabled=True)

environment = Environment('farmenv')
environment.python.conda_dependencies = CondaDependencies.create(
    pip_packages=[
        'azureml-defaults',
        'spacy',
        'transformers==2.3.0',
        'scipy',
        'numpy',
        'azure-storage-blob',
        'tqdm',
        'boto3',
        'scipy>=1.3.2',
        'sklearn',
        'seqeval',
        'mlflow==1.0.0',
        'dotmap==1.3.0',
        'git+https://github.com/deepset-ai/FARM.git',
        'git+https://github.com/zalandoresearch/flair.git'
    ],
    conda_packages=[
        'pytorch',
        'torchvision',
        'gensim',
        'numpy',
        'pandas'
    ])

service_name = f'nlp-{language}-{env}'

inference_config = InferenceConfig(entry_script='code/infer.py',
                                   source_directory='.',
                                   environment=environment)

# Update an existing service under the same name, otherwise create a new one.
try:
    # Webservice(ws, service_name).delete()
    # Retrieve the existing service.
    service = Webservice(name=service_name, workspace=ws)
    logger.warn('[INFO] Updating web service')
    # Update to the new model(s).
    service.update(models=[model], inference_config=inference_config)
    service.wait_for_deployment(show_output=True)
except WebserviceException:
    logger.warn('[INFO] Creating web service')
    service = Model.deploy(ws, service_name, [model], inference_config, deployment_config=aci_config)  # , overwrite=True)
    service.wait_for_deployment(show_output=True)
logger.warn(service.get_keys)

try:
    service.run(json.dumps([{"body": "Mein Windows Vista rechner will nicht mehr - ich kriege dauernd fehler meldungen. Ich wollte mir eh einen neuen kaufen, aber ich hab kein Geld. Kann Bill Gates mir helfen?"}]))
except Exception as e:
    logger.warn(f'[ERROR] Service was not deployed as expected. {e}')

# ---- Cell 4 recorded output ----
# stderr: repeated DeprecationWarnings ("The 'warn' method is deprecated, use 'warning' instead")
#   plus the logger messages: Loaded Workspace nlp-ml (nlp); Using existing model -> nlp_en_test;
#   Model Name: nlp_en_test; Model Version: 4; Creating web service.
# The run did not finish: Webservice(name='nlp-en-test', workspace=ws) raised WebserviceNotFound,
#   the fallback Model.deploy(...) was then cancelled by a KeyboardInterrupt while tar/gzip-packing
#   the source directory, and IPython's traceback renderer subsequently failed with
#   "TypeError: must be str, not list".

# ---- Cell 5 ----
# (empty)
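The notebook only exercises the deployed service through service.run(...). As a companion, a minimal sketch of calling the same ACI endpoint over plain HTTP is given below; the service name and the payload shape ([{"body": ...}]) follow the notebook's own test call, while the local Workspace.from_config() lookup and the shortened test sentence are assumptions. Whether an authentication header is needed depends on auth_enabled, which the notebook leaves commented out.

# Hypothetical REST call against the deployed web service (payload shape taken from the notebook's test).
import json
import requests
from azureml.core import Workspace
from azureml.core.webservice import Webservice

ws = Workspace.from_config()  # assumes a local config.json for the nlp-ml workspace
service = Webservice(name='nlp-en-test', workspace=ws)

payload = json.dumps([{"body": "Mein Windows Vista rechner will nicht mehr."}])
headers = {'Content-Type': 'application/json'}

response = requests.post(service.scoring_uri, data=payload, headers=headers)
print(response.status_code, response.text)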
Some file diffs are hidden because one or more lines are too long.