Merge branch 'master' into master
This commit is contained in:
Коммит
64a209a354
|
@ -0,0 +1,8 @@
|
|||
{
|
||||
"name": "TESTSERVICENAME",
|
||||
"inference_source_directory": "tests/deploy/",
|
||||
"inference_entry_script": "score.py",
|
||||
"conda_file": "environment.yml",
|
||||
"test_enabled": false,
|
||||
"delete_service_after_deployment": true
|
||||
}
|
|
@ -0,0 +1,8 @@
|
|||
{
|
||||
"name": "TESTSERVICENAME",
|
||||
"deployment_compute_target": "aks-intTest",
|
||||
"inference_source_directory": "tests/deploy/",
|
||||
"inference_entry_script": "score.py",
|
||||
"conda_file": "environment.yml",
|
||||
"delete_service_after_deployment": true
|
||||
}
|
|
@ -0,0 +1,4 @@
|
|||
{
|
||||
"name": "gha_and_aml_workspace",
|
||||
"resource_group": "gha_and_aml_rg"
|
||||
}
|
|
@ -1,5 +1,5 @@
|
|||
name: Integration Test
|
||||
on: [push]
|
||||
on: [push, pull_request]
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
|
@ -8,9 +8,38 @@ jobs:
|
|||
id: checkout_repository
|
||||
uses: actions/checkout@master
|
||||
|
||||
- name: Self test
|
||||
id: selftest
|
||||
uses: azure/aml-deploy@master
|
||||
# Connect to existing workspace
|
||||
- name: Create Workspace
|
||||
id: ws_create
|
||||
uses: Azure/aml-workspace@v1
|
||||
with:
|
||||
azure_credentials: ${{ secrets.AZURE_CREDENTIALS }}
|
||||
continue-on-error: true
|
||||
parameters_file: "test/test_workspace.json"
|
||||
|
||||
- name: Update Service Name Bash for ACI
|
||||
run: |
|
||||
sed -i -e 's/TESTSERVICENAME/'aci$RANDOM'/g' .cloud/.azure/test/test_aci_deploy.json
|
||||
|
||||
# Deploy model in Azure Machine Learning to ACI
|
||||
- name: Deploy model to ACI
|
||||
id: aml_deploy_aci
|
||||
uses: ./
|
||||
with:
|
||||
azure_credentials: ${{ secrets.AZURE_CREDENTIALS }}
|
||||
model_name: mymodel
|
||||
model_version: 1
|
||||
parameters_file: "test/test_aci_deploy.json"
|
||||
|
||||
- name: Update Service Name Bash for AKS
|
||||
run: |
|
||||
sed -i -e 's/TESTSERVICENAME/'aks$RANDOM'/g' .cloud/.azure/test/test_aks_deploy.json
|
||||
|
||||
# Deploy model in Azure Machine Learning to AKS
|
||||
- name: Deploy model to AKS
|
||||
id: aml_deploy_aks
|
||||
uses: ./
|
||||
with:
|
||||
azure_credentials: ${{ secrets.AZURE_CREDENTIALS }}
|
||||
model_name: mymodel
|
||||
model_version: 1
|
||||
parameters_file: "test/test_aks_deploy.json"
|
||||
|
|
|
@ -3,7 +3,6 @@
|
|||
|
||||
# GitHub Action for deploying Machine Learning Models to Azure
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
The Deploy Machine Learning Models to Azure action will deploy your model on [Azure Machine Learning](https://azure.microsoft.com/en-us/services/machine-learning/) using GitHub Actions.
|
||||
|
|
|
@ -40,7 +40,7 @@ def main():
|
|||
azure_credentials = json.loads(azure_credentials)
|
||||
except JSONDecodeError:
|
||||
print("::error::Please paste output of `az ad sp create-for-rbac --name <your-sp-name> --role contributor --scopes /subscriptions/<your-subscriptionId>/resourceGroups/<your-rg> --sdk-auth` as value of secret variable: AZURE_CREDENTIALS")
|
||||
raise AMLConfigurationException(f"Incorrect or poorly formed output from azure credentials saved in AZURE_CREDENTIALS secret. See setup in https://github.com/Azure/aml-compute/blob/master/README.md")
|
||||
raise AMLConfigurationException("Incorrect or poorly formed output from azure credentials saved in AZURE_CREDENTIALS secret. See setup in https://github.com/Azure/aml-compute/blob/master/README.md")
|
||||
|
||||
# Checking provided parameters
|
||||
print("::debug::Checking provided parameters")
|
||||
|
|
|
@ -0,0 +1,15 @@
|
|||
# Conda environment specification. Details about the Conda environment file format:
|
||||
# https://conda.io/docs/user-guide/tasks/manage-environments.html#create-env-file-manually
|
||||
|
||||
name: deployment_env
|
||||
dependencies:
|
||||
- python=3.6.2
|
||||
- scikit-learn=0.22.2
|
||||
- numpy=1.18.1
|
||||
- pip:
|
||||
- azureml-defaults==1.1.5
|
||||
- azureml-monitoring==0.1.0a18
|
||||
- inference-schema==1.0.2
|
||||
- inference-schema[numpy-support]==1.0.2
|
||||
channels:
|
||||
- conda-forge
|
|
@ -0,0 +1,40 @@
|
|||
import joblib
|
||||
import numpy as np
|
||||
|
||||
from azureml.core import Model
|
||||
from azureml.monitoring import ModelDataCollector
|
||||
from inference_schema.schema_decorators import input_schema, output_schema
|
||||
from inference_schema.parameter_types.numpy_parameter_type import NumpyParameterType
|
||||
from inference_schema.parameter_types.standard_py_parameter_type import StandardPythonParameterType
|
||||
|
||||
|
||||
# init() runs exactly once, when the scoring web service starts up.
# It loads the registered model into a module-level global so run()
# can reuse it, and sets up the Azure ML data collectors used for
# request/response monitoring.
def init():
    global model
    global inputs_dc, prediction_dc
    # The AZUREML_MODEL_DIR environment variable indicates a directory
    # containing the registered model file; Model.get_model_path
    # resolves the concrete path for the named model.
    model = joblib.load(Model.get_model_path(model_name="mymodel"))
    # Collectors stream the collected payloads to Azure Blob storage.
    inputs_dc = ModelDataCollector("sample-model", designation="inputs", feature_names=["feat1", "feat2", "feat3", "feat4"])
    prediction_dc = ModelDataCollector("sample-model", designation="predictions", feature_names=["prediction"])
|
||||
|
||||
|
||||
# run() is invoked once per request made to the scoring API.
# The optional input_schema/output_schema decorators (from the
# inference-schema pip package) validate the incoming payload against
# the sample input given here, and also generate a Swagger API
# document for the web service.
@input_schema('data', NumpyParameterType(np.array([[0.1, 1.2, 2.3, 3.4]])))
@output_schema(StandardPythonParameterType({'predict': [['Iris-virginica']]}))
def run(data):
    # Score with the model object loaded by init().
    result = model.predict(data)
    # Persist the request input and the prediction to Azure Blob
    # storage via the collectors configured in init().
    inputs_dc.collect(data)
    prediction_dc.collect(result)

    # Any JSON-serializable object may be returned to the caller.
    return {"predict": result.tolist()}
|
Загрузка…
Ссылка в новой задаче