
Merge branch 'master' into feature/textanalyticsexample

Dennis Eikelenboom 2020-03-04 12:23:01 +01:00 committed by GitHub
Parents 6b55a97ce1 f509205394
Commit 18536daf24
No key found matching this signature
GPG key ID: 4AEE18F83AFDEB23
106 changed files: 601 additions and 5345 deletions

1
.azureml/config.json Normal file
View file

@@ -0,0 +1 @@
{"Id": null, "Scope": "/subscriptions/cf4e1704-b4bc-4554-bcd7-309394f2ee56/resourceGroups/azuremlworkshoprgp/providers/Microsoft.MachineLearningServices/workspaces/azuremlworkshopws"}

3
.gitignore vendored
View file

@@ -12,3 +12,6 @@ assets
Code/Modeling/aml_config
Code/Modeling/assets
Code/Modeling/.amlignore
# ignore tmp folders
code/Data_Acquisition_and_Understanding/tmp/*

View file

@@ -1,3 +1 @@
# This folder hosts code for data acquisition and understanding (exploratory analysis)
You can add a detailed description in this markdown related to your specific data science project.
# This folder hosts production-intended data preparation logic

View file

@@ -1,3 +0,0 @@
# Databricks notebooks can be synced with Git. For code organization
# in line with this template, provide `Code/Data_Acquistion_and_Understanding`
# as a base path in Databricks.

View file

@@ -1 +0,0 @@
{}

View file

@@ -1 +0,0 @@
{}

View file

@@ -1 +0,0 @@
{}

View file

@@ -0,0 +1,34 @@
# Defines a tabular dataset on top of an Azure ML datastore
from azureml.core import Workspace, Datastore, Dataset
from azureml.data import DataType
from azureml.core.authentication import AzureCliAuthentication
# Retrieve a datastore from a ML workspace
ws = Workspace.from_config(auth=AzureCliAuthentication())
datastore_name = 'workspaceblobstore'
datastore = Datastore.get(ws, datastore_name)
# Register dataset version for each data split
for data_split in ['train', 'test']:
    # Create a TabularDataset from paths in datastore in split folder
    # Note that wildcards can be used
    datastore_paths = [
        (datastore, '{}/*.csv'.format(data_split))
    ]

    # Create a TabularDataset from paths in datastore
    dataset = Dataset.Tabular.from_delimited_files(
        path=datastore_paths,
        set_column_types={
            'text': DataType.to_string(),
            'target': DataType.to_string()
        },
        header=True
    )

    # Register the defined dataset for later use
    dataset.register(
        workspace=ws,
        name='newsgroups_{}'.format(data_split),
        description='newsgroups data'
    )
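
For reference, a dataset registered this way can later be pulled back by name from the same workspace. A minimal sketch (hypothetical check script, not part of this commit; it assumes the newsgroups_train name registered above):

# Retrieve the latest registered version of the training dataset and peek at it
from azureml.core import Workspace, Dataset
from azureml.core.authentication import AzureCliAuthentication

ws = Workspace.from_config(auth=AzureCliAuthentication())
train_ds = Dataset.get_by_name(ws, name='newsgroups_train')
print(train_ds.to_pandas_dataframe().head())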

View file

@@ -0,0 +1,74 @@
# Pre-processes SKLearn sample data
# Ingest the data into an Azure ML Datastore for training
import pandas as pd
import time
import os
from sklearn.datasets import fetch_20newsgroups
from azureml.core import Workspace, Datastore
from azureml.core.authentication import AzureCliAuthentication
# Define newsgroup categories to be downloaded to generate sample dataset
# @TODO add additional newsgroups
categories = [
'alt.atheism',
'talk.religion.misc',
'comp.graphics',
'sci.space',
]
print("Loading 20 newsgroups dataset for categories:")
print(categories if categories else "all")
for data_split in ['train', 'test']:
    # retrieve newsgroup data
    newsgroupdata = fetch_20newsgroups(
        subset=data_split,
        categories=categories,
        shuffle=True,
        random_state=42
    )

    # construct pandas data frame from loaded sklearn newsgroup data
    df = pd.DataFrame({
        'text': newsgroupdata.data,
        'target': newsgroupdata.target
    })
    print('data loaded')

    # pre-process:
    # remove line breaks
    # replace target index by newsgroup name
    target_names = newsgroupdata.target_names
    df.target = df.target.apply(lambda x: target_names[x])
    df.text = df.text.replace('\n', ' ', regex=True)
    print(df.head(5))

    # make sure the local tmp output folder for this split exists
    os.makedirs(os.path.join(
        os.path.dirname(os.path.realpath(__file__)),
        'tmp',
        data_split
    ), exist_ok=True)

    # write to csv
    df.to_csv(os.path.join(
        os.path.dirname(os.path.realpath(__file__)),
        'tmp',
        data_split,
        '{}.csv'.format(int(time.time()))  # unique file name
    ), index=False, encoding="utf-8", line_terminator='\n')
datastore_name = 'workspaceblobstore'
# get existing ML workspace
workspace = Workspace.from_config(auth=AzureCliAuthentication())
# retrieve an existing datastore in the workspace by name
datastore = Datastore.get(workspace, datastore_name)
# upload files
datastore.upload(
src_dir=os.path.join(
os.path.dirname(os.path.realpath(__file__)),
'tmp'
),
target_path=None,
overwrite=True,
show_progress=True
)
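
To sanity-check the ingestion, the same datastore can be read back locally. A hedged sketch (hypothetical, not part of this commit; the train prefix matches the folder written above):

# Download the uploaded train/*.csv files into a local folder for inspection
from azureml.core import Workspace, Datastore
from azureml.core.authentication import AzureCliAuthentication

ws = Workspace.from_config(auth=AzureCliAuthentication())
datastore = Datastore.get(ws, 'workspaceblobstore')
datastore.download(target_path='./tmp_check', prefix='train', show_progress=True)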

View file

@@ -1,2 +0,0 @@
# Logic to upload sample dataset (see Sample_Data folder) to AML Datastore
# @TODO for starter template

View file

@@ -1 +0,0 @@
# This folder hosts production-intended data preparation logic

View file

@@ -4,43 +4,20 @@ from azureml.train.hyperdrive import (
from azureml.core import Workspace, Experiment
from azureml.train.estimator import Estimator
import pandas as pd
from azureml.core.compute import ComputeTarget, AmlCompute
import os
from random import choice
from azureml.core.authentication import AzureCliAuthentication
ws = Workspace(
"e0eeddf8-2d02-4a01-9786-92bb0e0cb692", "azure-ml-rg",
"azure-machine-learning-ws",
auth=None, _location=None, _disable_service_check=False,
_workspace_id=None, sku='basic'
)
# load Azure ML workspace
workspace = Workspace.from_config(auth=AzureCliAuthentication())
cluster_name = 'hypetuning'
provisioning_config = AmlCompute.provisioning_configuration(
vm_size='Standard_D4_v2',
# vm_priority = 'lowpriority', # optional
max_nodes=16)
if cluster_name in ws.compute_targets:
    compute_target = ws.compute_targets[cluster_name]
    if type(compute_target) is not AmlCompute:
        raise Exception('Compute target {} is not an AML cluster.'
                        .format(cluster_name))
    print('Using pre-existing AML cluster {}'.format(cluster_name))
else:
    # Create the cluster
    compute_target = ComputeTarget.create(ws, cluster_name,
                                          provisioning_config)
    compute_target.wait_for_completion(show_output=True,
                                       min_node_count=None,
                                       timeout_in_minutes=20)
# Define Run Configuration
estimator = Estimator(
source_directory=os.path.dirname(os.path.realpath(__file__)),
compute_target=compute_target,
entry_script='hypertrain.py',
source_directory=os.path.dirname(os.path.realpath(__file__)),
compute_target=workspace.compute_targets[cluster_name],
pip_packages=[
'numpy==1.15.4',
'pandas==0.23.4',
@@ -51,6 +28,7 @@ estimator = Estimator(
]
)
# Set parameters for search
param_sampling = RandomParameterSampling({
"max_depth": choice([100, 50, 20, 10]),
"n_estimators": choice([50, 150, 200, 250]),
@@ -59,6 +37,7 @@ param_sampling = RandomParameterSampling({
}
)
# Define multi-run configuration
hyperdrive_run_config = HyperDriveConfig(
estimator=estimator,
hyperparameter_sampling=param_sampling,
@@ -69,42 +48,48 @@ hyperdrive_run_config = HyperDriveConfig(
max_concurrent_runs=None
)
experiment = Experiment(ws, "testhypertuning")
# Define the ML experiment
experiment = Experiment(workspace, "newsgroups_train_hypertune")
hyperdrive_run = experiment.submit(hyperdrive_run_config)
hyperdrive_run.wait_for_completion()
# run = experiment.submit(estimator)
# Select the best run from all submitted
best_run = hyperdrive_run.get_best_run_by_primary_metric()
best_run_metrics = best_run.get_metrics()
# Log the best run's performance to the parent run
hyperdrive_run.log("Accuracy", best_run_metrics['accuracy'])
parameter_values = best_run.get_details()['runDefinition']['arguments']
# Print best set of parameters found
best_parameters = dict(zip(parameter_values[::2], parameter_values[1::2]))
pd.Series(best_parameters, name='Value').to_frame()
model_parameters = best_parameters.copy()
pd.Series(model_parameters, name='Value').to_frame()
print(model_parameters)
best_model_parameters = best_parameters.copy()
pd.Series(best_model_parameters, name='Value').to_frame()
print(best_model_parameters)
# model_parameters['--data-folder'] = ds.as_mount()
exp = Experiment(ws, "finalmodel")
# Define a final training run with model's best parameters
model_est = Estimator(
entry_script='hypertrain.py',
source_directory=os.path.dirname(os.path.realpath(__file__)),
script_params=best_model_parameters,
compute_target=workspace.compute_targets[cluster_name],
pip_packages=[
'numpy==1.15.4',
'pandas==0.23.4',
'scikit-learn==0.20.1',
'scipy==1.0.0',
'matplotlib==3.0.2',
'utils==0.9.0'
]
)
model_est = Estimator(source_directory=os.path.dirname(os.path.realpath(__file__)),
entry_script='hypertrain.py',
script_params=model_parameters,
compute_target=compute_target,
pip_packages=[
'numpy==1.15.4',
'pandas==0.23.4',
'scikit-learn==0.20.1',
'scipy==1.0.0',
'matplotlib==3.0.2',
'utils==0.9.0'])
# Submit the experiment
model_run = experiment.submit(model_est)
model_run = exp.submit(model_est)
model_run_status = model_run.wait_for_completion(wait_post_processing=True)
model = model_run.register_model(model_name='model',
model_path=os.path.join('outputs', 'model.pkl'))
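
For context, HyperDrive passes each sampled value to hypertrain.py as a command-line argument, which is why the arguments list zipped above alternates flag and value. A hypothetical sketch of the receiving side (hypertrain.py itself is not shown in this diff, so the flag names are assumptions based on the sampling space):

# hypothetical argument parsing inside hypertrain.py (not part of this commit)
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--max_depth', type=int, default=50)
parser.add_argument('--n_estimators', type=int, default=150)
args, unknown = parser.parse_known_args()
print(vars(args))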

View file

@@ -1 +0,0 @@
# This folder represents an example of code organization where all code related to a specific ML model is shipped as modules, and is logically grouped as a package in Python

View file

@@ -1,3 +0,0 @@
"""
Model Explainability analysis
"""

View file

@@ -1,49 +0,0 @@
class Modeler:
    """
    Model Factory Class
    Train a model
    """

    def __init__(self):
        """
        Constructor
        """
        self.model = None
        return

    def validate_data(self, df):
        """
        Validate data integrity before training
        Basic statistics, data assumptions
        """
        return

    def splitData(self, df):
        """
        Split dataset
        """
        return df, df, df, df

    def train(self, X_train, y_train):
        """
        Cross validate and train a model
        """
        # sample model as template placeholder
        self.model = None

        # return training results for logging
        cv_scores = {}
        return cv_scores

    def score(self, df):
        """
        Score a dataset using a trained model
        """
        return

    def explain(self, df):
        """
        Explain a trained model's predictions
        """
        return
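
For orientation, a template like this would be driven roughly as follows once the stubs are filled in. A hedged sketch (hypothetical; df stands for a prepared pandas DataFrame):

# hypothetical driver for the Modeler template (all methods above are still stubs)
modeler = Modeler()
modeler.validate_data(df)
X_train, X_test, y_train, y_test = modeler.splitData(df)
cv_scores = modeler.train(X_train, y_train)   # cross-validation results for logging
predictions = modeler.score(X_test)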

View file

@@ -64,26 +64,6 @@ training_pipeline = Pipeline(workspace=ws, steps=[trainStep])
training_pipeline.validate()
print("Pipeline validation complete")
if run_experiment:
    # Submit pipeline run
    pipeline_run = Experiment(ws, exp_name).submit(training_pipeline)
    pipeline_run.wait_for_completion()

    # create output folder
    os.makedirs(output_dir_local, exist_ok=True)

    # Download trained model artifacts to local compute
    run_train_step = list(pipeline_run.get_children())[0]
    for file in ['model.pkl']:
        run_train_step.download_file(
            name=os.path.join(output_dir, file),
            output_file_path=os.path.join(output_dir_local, file)
        )

if register_model:
    # @TODO Register model, version by build id
    print('Registered model')

if publish_pipeline:
    # @TODO Publish pipeline, version by build id
    print('Published pipeline')

# Submit pipeline run
pipeline_run = Experiment(ws, exp_name).submit(training_pipeline)
pipeline_run.wait_for_completion()
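
One hedged way to fill in the register_model placeholder above is to register the downloaded artifact with the workspace, versioned by the pipeline's build id. A sketch (hypothetical, not part of this commit; it reuses ws and output_dir_local from this script and assumes Azure DevOps exposes BUILD_BUILDID):

# hypothetical register_model step
from azureml.core.model import Model

model = Model.register(
    workspace=ws,
    model_path=os.path.join(output_dir_local, 'model.pkl'),  # artifact downloaded above
    model_name='newsgroups_model',
    tags={'build_id': os.environ.get('BUILD_BUILDID', 'local')}
)
print('Registered model version', model.version)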

View file

@@ -1 +0,0 @@
# This folder contains definitions for ML Pipelines. ML Pipelines define machine learning workflows that may consist of multiple execution steps that can run on heterogeneous compute types. Compute steps may share data resources to pipe inputs and outputs

View file

@@ -1,3 +0,0 @@
"""
@TODO
"""

View file

@@ -1,3 +0,0 @@
"""
@TODO
"""

View file

View file

@@ -1 +0,0 @@
# Integration tests

View file

@@ -1,2 +0,0 @@
# Integration test for inference code
# @TODO

View file

@@ -1 +0,0 @@
# This folder contains unit tests for the sample package modelpackage.

View file

@@ -1 +0,0 @@
# Subpackage unit tests

View file

@@ -1,2 +0,0 @@
def test_mock():
    return True

View file

@@ -206,4 +206,4 @@ benchmark(
]
),
name="LinearSVC with L1-based feature selection"
)
)

View file

@@ -0,0 +1,212 @@
import os
import numpy as np
from optparse import OptionParser
from time import time
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.feature_extraction.text import HashingVectorizer
from sklearn.feature_selection import SelectFromModel
from sklearn.feature_selection import SelectKBest, chi2
from sklearn.linear_model import RidgeClassifier
from sklearn.pipeline import Pipeline
from sklearn.svm import LinearSVC
from sklearn.linear_model import SGDClassifier
from sklearn.linear_model import Perceptron
from sklearn.linear_model import PassiveAggressiveClassifier
from sklearn.naive_bayes import BernoulliNB, ComplementNB, MultinomialNB
from sklearn.neighbors import KNeighborsClassifier
from sklearn.neighbors import NearestCentroid
from sklearn.ensemble import RandomForestClassifier
from sklearn.utils.extmath import density
from sklearn import metrics
from sklearn.externals import joblib
from azureml.core import Run
op = OptionParser()
op.add_option("--report",
action="store_true", dest="print_report",
help="Print a detailed classification report.")
op.add_option("--chi2_select",
action="store", type="int", dest="select_chi2",
help="Select some number of features using a chi-squared test")
op.add_option("--confusion_matrix",
action="store_true", dest="print_cm",
help="Print the confusion matrix.")
op.add_option("--top10",
action="store_true", dest="print_top10",
help="Print ten most discriminative terms per class"
" for every classifier.")
op.add_option("--all_categories",
action="store_true", dest="all_categories",
help="Whether to use all categories or not.")
op.add_option("--use_hashing",
action="store_true",
help="Use a hashing vectorizer.")
op.add_option("--n_features",
action="store", type=int, default=2 ** 16,
help="n_features when using the hashing vectorizer.")
op.add_option("--filtered",
action="store_true",
help="Remove newsgroup information that is easily overfit: "
"headers, signatures, and quoting.")
# Parse the command-line options defined above
(opts, args) = op.parse_args()

# Retrieve the run and its context (datasets etc.)
run = Run.get_context()

# Load the input datasets from Azure ML
dataset_train = run.input_datasets['train'].to_pandas_dataframe()
dataset_test = run.input_datasets['test'].to_pandas_dataframe()

# Pre-process the data frames for sklearn
# convert the text columns to numpy arrays
data_train = dataset_train.text.values
data_test = dataset_test.text.values

# save original target names (class labels, sorted as sklearn orders them)
target_names = sorted(dataset_train.target.unique())

# target labels
y_train = dataset_train.target.values
y_test = dataset_test.target.values
# Extract features from the training data using a sparse vectorizer
vectorizer = HashingVectorizer(
    stop_words='english',
    alternate_sign=False,
    n_features=opts.n_features
)
X_train = vectorizer.transform(data_train)

# Extract features from the test data using the same vectorizer
X_test = vectorizer.transform(data_test)

# the hashing vectorizer keeps no mapping from feature index to token string,
# so no feature names are available
feature_names = None

# # Extracting %d best features by a chi-squared test
# ch2 = SelectKBest(chi2, k=opts.select_chi2)
# X_train = ch2.fit_transform(X_train, y_train)
# X_test = ch2.transform(X_test)
# keep selected feature names
# feature_names = [feature_names[i] for i
#                  in ch2.get_support(indices=True)]
# feature_names = np.asarray(feature_names)
def trim(s):
    """Trim string to fit on terminal (assuming 80-column display)"""
    return s if len(s) <= 80 else s[:77] + "..."
def benchmark(clf, name):
    print('_' * 80)
    print("Training: ")
    print(clf)
    t0 = time()
    clf.fit(X_train, y_train)
    train_time = time() - t0
    print("train time: %0.3fs" % train_time)

    t0 = time()
    pred = clf.predict(X_test)
    test_time = time() - t0
    print("test time: %0.3fs" % test_time)

    score = metrics.accuracy_score(y_test, pred)

    # log the score on a child run of the current Azure ML run
    child_run = run.child_run(name=name)
    child_run.log("accuracy", float(score))

    # persist the fitted model and attach it to the child run
    model_name = "model" + str(name) + ".pkl"
    filename = "outputs/" + model_name
    os.makedirs("outputs", exist_ok=True)
    joblib.dump(value=clf, filename=filename)
    child_run.upload_file(name=model_name, path_or_stream=filename)

    print("accuracy: %0.3f" % score)

    if hasattr(clf, 'coef_'):
        print("dimensionality: %d" % clf.coef_.shape[1])
        print("density: %f" % density(clf.coef_))

        if opts.print_top10 and feature_names is not None:
            print("top 10 keywords per class:")
            for i, label in enumerate(target_names):
                top10 = np.argsort(clf.coef_[i])[-10:]
                print(trim("%s: %s" % (label, " ".join(feature_names[top10]))))
        print()

    if opts.print_report:
        print("classification report:")
        print(metrics.classification_report(y_test, pred,
                                            target_names=target_names))

    if opts.print_cm:
        print("confusion matrix:")
        print(metrics.confusion_matrix(y_test, pred))

    print()
    clf_descr = str(clf).split('(')[0]
    child_run.complete()
    return clf_descr, score, train_time, test_time
results = []
for clf, name in (
        (RidgeClassifier(tol=1e-2, solver="sag"), "Ridge Classifier"),
        (Perceptron(max_iter=50), "Perceptron"),
        (PassiveAggressiveClassifier(max_iter=50),
         "Passive-Aggressive"),
        (KNeighborsClassifier(n_neighbors=10), "kNN"),
        (RandomForestClassifier(), "Random forest")):
    print('=' * 80)
    print(name)
    results.append(benchmark(clf, name))

for penalty in ["l2", "l1"]:
    print('=' * 80)
    print("%s penalty" % penalty.upper())

    # Train Liblinear model
    name = penalty + "LinearSVC"
    results.append(benchmark(LinearSVC(penalty=penalty, dual=False,
                                       tol=1e-3), name))

    # Train SGD model
    name = penalty + "SGDClassifier"
    results.append(benchmark(SGDClassifier(alpha=.0001, max_iter=50,
                                           penalty=penalty), name))

# Train SGD with Elastic Net penalty
print('=' * 80)
print("Elastic-Net penalty")
name = "Elastic-Net penalty"
results.append(benchmark(SGDClassifier(alpha=.0001, max_iter=50,
                                       penalty="elasticnet"), name))

# Train NearestCentroid without threshold
print('=' * 80)
print("NearestCentroid (aka Rocchio classifier)")
name = "NearestCentroid (aka Rocchio classifier)"
results.append(benchmark(NearestCentroid(), name))

# Train sparse Naive Bayes classifiers
print('=' * 80)
print("Naive Bayes")
name = "Naive Bayes MultinomialNB"
results.append(benchmark(MultinomialNB(alpha=.01), name))
name = "Naive Bayes BernoulliNB"
results.append(benchmark(BernoulliNB(alpha=.01), name))
name = "Naive Bayes ComplementNB"
results.append(benchmark(ComplementNB(alpha=.1), name))

print('=' * 80)
print("LinearSVC with L1-based feature selection")
# The smaller C, the stronger the regularization.
# The more regularization, the more sparsity.
name = "LinearSVC with L1-based feature selection"
results.append(benchmark(Pipeline([
    ('feature_selection', SelectFromModel(LinearSVC(penalty="l1", dual=False,
                                                    tol=1e-3))),
    ('classification', LinearSVC(penalty="l2"))]), name))
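
For reference, the OptionParser flags above can be forwarded from a submitter script through the Estimator's script_params. A hedged sketch (hypothetical fragment, not part of this commit; an empty string value passes a bare switch):

# hypothetical submitter fragment forwarding train.py's flags
from azureml.train.estimator import Estimator

est = Estimator(
    entry_script='train.py',
    source_directory='.',
    compute_target='local',
    script_params={
        '--report': '',
        '--confusion_matrix': '',
        '--n_features': 65536
    },
    pip_packages=['scikit-learn==0.20.1', 'pandas==0.23.4']
)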

View file

@@ -1,15 +1,40 @@
"""
Training submitter
Facilitates (remote) training execution through the Azure ML service.
"""
import os
from azureml.core import Workspace, Experiment
from azureml.train.estimator import Estimator
from azureml.core.authentication import AzureCliAuthentication
# load Azure ML workspace
workspace = Workspace.from_config(auth=AzureCliAuthentication())
# Define Run Configuration
est = Estimator(
    entry_script='train.py',
    source_directory=os.path.dirname(os.path.realpath(__file__)),
    compute_target='local',
    conda_packages=[
        'pip==20.0.2'
    ],
    pip_packages=[
        'numpy==1.15.4',
        'pandas==0.23.4',
        'scikit-learn==0.20.1',
        'scipy==1.0.0',
        'matplotlib==3.0.2',
        'utils==0.9.0'
    ],
    use_docker=False
)
# Define the ML experiment
experiment = Experiment(workspace, "newsgroups_train")
# Submit the experiment run; if compute is idle, this may take some time
run = experiment.submit(est)

# wait for completion of the run, while showing the logs
run.wait_for_completion(show_output=True)

View file

@@ -0,0 +1,48 @@
"""
Training submitter
Facilitates (remote) training execution through the Azure ML service.
"""
import os
from azureml.core import Workspace, Dataset, Experiment
from azureml.train.estimator import Estimator
from azureml.core.authentication import AzureCliAuthentication
# load Azure ML workspace
workspace = Workspace.from_config(auth=AzureCliAuthentication())
# retrieve datasets used for training
dataset_train = Dataset.get_by_name(workspace, name='newsgroups_train')
dataset_test = Dataset.get_by_name(workspace, name='newsgroups_test')
# Define Run Configuration
est = Estimator(
    entry_script='train.py',
    source_directory=os.path.dirname(os.path.realpath(__file__)),
    compute_target='local',
    conda_packages=[
        'pip==20.0.2'
    ],
    pip_packages=[
        'numpy==1.15.4',
        'pandas==0.23.4',
        'scikit-learn==0.20.1',
        'scipy==1.0.0',
        'matplotlib==3.0.2',
        'utils==0.9.0'
    ],
    use_docker=False,
    inputs=[
        dataset_train.as_named_input('train'),
        dataset_test.as_named_input('test')
    ]
)
# Define the ML experiment
experiment = Experiment(workspace, "newsgroups_train")
# Submit the experiment run; if compute is idle, this may take some time
run = experiment.submit(est)

# wait for completion of the run, while showing the logs
run.wait_for_completion(show_output=True)
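
After completion, the logged metrics and uploaded artifacts can be inspected from the same session. A brief sketch reusing the run handle created above (the child runs are the per-classifier runs opened in train.py):

# inspect the completed run
print(run.get_metrics())           # metrics logged on the parent run
for child in run.get_children():   # per-classifier child runs
    print(child.id, child.get_metrics())
print(run.get_file_names())        # uploaded artifacts, e.g. model .pkl files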

View file

@@ -1,3 +0,0 @@
# Build and Release Definitions
The definitions found in this folder can be imported using Azure DevOps to provide a quick start for model CI/CD and Azure Resource Provisioning.

View file

@@ -1,296 +0,0 @@
{
"options": [
{
"enabled": false,
"definition": {
"id": "5d58cc01-7c75-450c-be18-a388ddb129ec"
},
"inputs": {
"branchFilters": "[\"+refs/heads/*\"]",
"additionalFields": "{}"
}
},
{
"enabled": false,
"definition": {
"id": "a9db38f9-9fdc-478c-b0f9-464221e58316"
},
"inputs": {
"workItemType": "2418250",
"assignToRequestor": "true",
"additionalFields": "{}"
}
}
],
"triggers": [
{
"branchFilters": [
"+feature/*"
],
"pathFilters": [],
"batchChanges": false,
"maxConcurrentBuildsPerBranch": 1,
"pollingInterval": 0,
"triggerType": 2
}
],
"variables": {
"AMLWorkspaceName": {
"value": ""
},
"ResourceGroupName": {
"value": ""
},
"system.debug": {
"value": "false",
"allowOverride": true
}
},
"retentionRules": [
{
"branches": [
"+refs/heads/*"
],
"artifacts": [],
"artifactTypesToDelete": [
"FilePath",
"SymbolStore"
],
"daysToKeep": 10,
"minimumToKeep": 1,
"deleteBuildRecord": true,
"deleteTestResults": true
}
],
"properties": {},
"tags": [],
"_links": {
"self": {
"href": "https://dev.azure.com/account/123456789/_apis/build/Definitions/10?revision=7"
},
"web": {
"href": "https://dev.azure.com/account/123456789/_build/definition?definitionId=10"
},
"editor": {
"href": "https://dev.azure.com/account/123456789/_build/designer?id=10&_a=edit-build-definition"
},
"badge": {
"href": "https://dev.azure.com/account/123456789/_apis/build/status/10"
}
},
"jobAuthorizationScope": 1,
"jobTimeoutInMinutes": 60,
"jobCancelTimeoutInMinutes": 5,
"process": {
"phases": [
{
"steps": [
{
"environment": {},
"enabled": true,
"continueOnError": false,
"alwaysRun": false,
"displayName": "Create Conda Environment",
"timeoutInMinutes": 0,
"condition": "succeeded()",
"task": {
"id": "03dd16c3-43e0-4667-ba84-40515d27a410",
"versionSpec": "1.*",
"definitionType": "task"
},
"inputs": {
"createCustomEnvironment": "true",
"environmentName": "project_environment",
"packageSpecs": "Python=3.6 cython numpy",
"updateConda": "true",
"installOptions": "",
"createOptions": "",
"cleanEnvironment": "false"
}
},
{
"environment": {},
"enabled": true,
"continueOnError": true,
"alwaysRun": false,
"displayName": "Prepare Conda Environment (using yml)",
"timeoutInMinutes": 0,
"condition": "succeeded()",
"task": {
"id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9",
"versionSpec": "2.*",
"definitionType": "task"
},
"inputs": {
"script": "conda env list;\n\nconda env update -f ./conda_dependencies.yml",
"workingDirectory": "",
"failOnStderr": "false"
}
},
{
"environment": {},
"enabled": true,
"continueOnError": true,
"alwaysRun": false,
"displayName": "Unit tests (model code)",
"timeoutInMinutes": 0,
"condition": "succeeded()",
"task": {
"id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9",
"versionSpec": "2.*",
"definitionType": "task"
},
"inputs": {
"script": "pip install setuptools\npip install pytest\npython -m pytest \\\n -k \"not integration\" \\\n --junit-xml $(Build.BinariesDirectory)/unittest_report.xml",
"workingDirectory": "",
"failOnStderr": "false"
}
},
{
"environment": {},
"enabled": true,
"continueOnError": true,
"alwaysRun": false,
"displayName": "Code Quality (flake8)",
"timeoutInMinutes": 0,
"condition": "succeeded()",
"task": {
"id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9",
"versionSpec": "2.*",
"definitionType": "task"
},
"inputs": {
"script": "pip install flake8\npip install flake8_formatter_junit_xml\nflake8 --format junit-xml --output-file $(Build.BinariesDirectory)/flake8_report.xml --exit-zero\n",
"workingDirectory": "",
"failOnStderr": "false"
}
},
{
"environment": {},
"enabled": true,
"continueOnError": false,
"alwaysRun": false,
"displayName": "Publish Unit Test Results",
"timeoutInMinutes": 0,
"condition": "succeeded()",
"task": {
"id": "0b0f01ed-7dde-43ff-9cbb-e48954daf9b1",
"versionSpec": "2.*",
"definitionType": "task"
},
"inputs": {
"testRunner": "JUnit",
"testResultsFiles": "$(Build.BinariesDirectory)/*_report.xml",
"searchFolder": "$(System.DefaultWorkingDirectory)/Code/Modeling",
"mergeTestResults": "true",
"failTaskOnFailedTests": "false",
"testRunTitle": "",
"platform": "",
"configuration": "",
"publishRunAttachments": "true"
}
}
],
"name": "Agent Phase",
"refName": "Phase_2",
"condition": "succeeded()",
"target": {
"executionOptions": {
"type": 0
},
"allowScriptsAuthAccessOption": false,
"type": 1
},
"jobAuthorizationScope": 1,
"jobCancelTimeoutInMinutes": 1
}
],
"type": 1
},
"repository": {
"properties": {
"apiUrl": "https://api.github.com/repos/user/Azure-TDSP-DevOps-Template",
"branchesUrl": "https://api.github.com/repos/user/Azure-TDSP-DevOps-Template/branches",
"cloneUrl": "https://github.com/user/Azure-TDSP-DevOps-Template.git",
"fullName": "user/Azure-TDSP-DevOps-Template",
"manageUrl": "https://github.com/user/Azure-TDSP-DevOps-Template",
"refsUrl": "https://api.github.com/repos/user/Azure-TDSP-DevOps-Template/git/refs",
"defaultBranch": "master",
"connectedServiceId": "8f189c40-5897-4ada-9cce-df147f88fa57",
"isPrivate": "False",
"isFork": "True",
"ownerAvatarUrl": "https://avatars2.githubusercontent.com/u/872065?v=4",
"lastUpdated": "02/08/2019 13:09:48",
"nodeId": "MDEwOlJlcG9zaXRvcnkxNjE2OTgyMjc=",
"hasAdminPermissions": "True",
"safeOwnerId": "5848F3A00730FC1BBD623A19F26B0C66A2094333D2CF4382C9FD3483C02D5699D4A8932BB7FDE482121158ED9B8F66CE4A055D40DF47AB8C965FF7A9B8BA",
"ownerId": "872065",
"safeRepository": "Azure-TDSP-DevOps-Template",
"ownerIsAUser": "True",
"checkoutNestedSubmodules": "false",
"cleanOptions": "0",
"fetchDepth": "0",
"gitLfsSupport": "false",
"reportBuildStatus": "true",
"skipSyncSource": "false",
"labelSourcesFormat": "$(build.buildNumber)",
"labelSources": "0"
},
"id": "user/Azure-TDSP-DevOps-Template",
"type": "GitHub",
"name": "user/Azure-TDSP-DevOps-Template",
"url": "https://github.com/user/Azure-TDSP-DevOps-Template.git",
"defaultBranch": "master",
"clean": "false",
"checkoutSubmodules": false
},
"processParameters": {},
"quality": 1,
"authoredBy": {
"displayName": "user ",
"url": "https://spsprodweu3.vssps.visualstudio.com/Ae8b74699-309d-4bdc-b628-67015b7fd300/_apis/Identities/73d4d2b0-9cab-6c6a-aac6-551eaf5c4672",
"_links": {
"avatar": {
"href": "https://dev.azure.com/account/_apis/GraphProfile/MemberAvatars/aad.NzNkNGQyYjAtOWNhYi03YzZhLWFhYzYtNTUxZWFmNWM0Njcy"
}
},
"id": "73d4d2b0-9cab-6c6a-aac6-551eaf5c4672",
"uniqueName": "account@microsoft.com",
"imageUrl": "https://dev.azure.com/account/_api/_common/identityImage?id=73d4d2b0-9cab-6c6a-aac6-551eaf5c4672",
"descriptor": "aad.NzNkNGQyYjAtOWNhYi03YzZhLWFhYzYtNTUxZWFmNWM0Njcy"
},
"drafts": [],
"queue": {
"_links": {
"self": {
"href": "https://dev.azure.com/account/_apis/build/Queues/35"
}
},
"id": 35,
"name": "Hosted Ubuntu 1604",
"url": "https://dev.azure.com/account/_apis/build/Queues/35",
"pool": {
"id": 6,
"name": "Hosted Ubuntu 1604",
"isHosted": true
}
},
"id": 10,
"name": "Code Quality",
"url": "https://dev.azure.com/account/123456789/_apis/build/Definitions/10?revision=7",
"uri": "vstfs:///Build/Definition/10",
"path": "\\",
"type": 2,
"queueStatus": 0,
"revision": 7,
"createdDate": "2019-02-09T17:14:45.473Z",
"project": {
"id": "123456789",
"name": "DevOps for AI",
"url": "https://dev.azure.com/account/_apis/projects/123456789",
"state": 1,
"revision": 57,
"visibility": 0,
"lastUpdateTime": "2018-08-15T15:07:27.100Z"
}
}

View file

@@ -1,208 +0,0 @@
{
"options": [
{
"enabled": false,
"definition": {
"id": "5d58cc01-7c75-450c-be18-a388ddb129ec"
},
"inputs": {
"branchFilters": "[\"+refs/heads/*\"]",
"additionalFields": "{}"
}
},
{
"enabled": false,
"definition": {
"id": "a9db38f9-9fdc-478c-b0f9-464221e58316"
},
"inputs": {
"workItemType": "2418250",
"assignToRequestor": "true",
"additionalFields": "{}"
}
}
],
"triggers": [
{
"branchFilters": [
"+master"
],
"pathFilters": [
"+Code/Operationalization/provisioning/"
],
"batchChanges": false,
"maxConcurrentBuildsPerBranch": 1,
"pollingInterval": 0,
"triggerType": 2
}
],
"variables": {
"system.debug": {
"value": "false",
"allowOverride": true
}
},
"retentionRules": [
{
"branches": [
"+refs/heads/*"
],
"artifacts": [],
"artifactTypesToDelete": [
"FilePath",
"SymbolStore"
],
"daysToKeep": 10,
"minimumToKeep": 1,
"deleteBuildRecord": true,
"deleteTestResults": true
}
],
"properties": {},
"tags": [],
"_links": {
"self": {
"href": "https://dev.azure.com/user/3ec38c18-3edc-45c6-9a06-33069ee6e54e/_apis/build/Definitions/11?revision=4"
},
"web": {
"href": "https://dev.azure.com/user/3ec38c18-3edc-45c6-9a06-33069ee6e54e/_build/definition?definitionId=11"
},
"editor": {
"href": "https://dev.azure.com/user/3ec38c18-3edc-45c6-9a06-33069ee6e54e/_build/designer?id=11&_a=edit-build-definition"
},
"badge": {
"href": "https://dev.azure.com/user/3ec38c18-3edc-45c6-9a06-33069ee6e54e/_apis/build/status/11"
}
},
"jobAuthorizationScope": 1,
"jobTimeoutInMinutes": 60,
"jobCancelTimeoutInMinutes": 5,
"process": {
"phases": [
{
"steps": [
{
"environment": {},
"enabled": true,
"continueOnError": false,
"alwaysRun": false,
"displayName": "Publish Resource Templates",
"timeoutInMinutes": 0,
"condition": "succeeded()",
"task": {
"id": "2ff763a7-ce83-4e1f-bc89-0ae63477cebe",
"versionSpec": "1.*",
"definitionType": "task"
},
"inputs": {
"PathtoPublish": "Code/Operationalization/provisioning",
"ArtifactName": "resourcetemplates",
"ArtifactType": "Container",
"TargetPath": "",
"Parallel": "false",
"ParallelCount": "8"
}
}
],
"name": "Agent job 1",
"refName": "Phase_1",
"condition": "succeeded()",
"target": {
"executionOptions": {
"type": 0
},
"allowScriptsAuthAccessOption": false,
"type": 1
},
"jobAuthorizationScope": 1,
"jobCancelTimeoutInMinutes": 1
}
],
"type": 1
},
"repository": {
"properties": {
"apiUrl": "https://api.github.com/repos/user/Azure-TDSP-DevOps-Template",
"branchesUrl": "https://api.github.com/repos/user/Azure-TDSP-DevOps-Template/branches",
"cloneUrl": "https://github.com/user/Azure-TDSP-DevOps-Template.git",
"fullName": "user/Azure-TDSP-DevOps-Template",
"manageUrl": "https://github.com/user/Azure-TDSP-DevOps-Template",
"refsUrl": "https://api.github.com/repos/user/Azure-TDSP-DevOps-Template/git/refs",
"defaultBranch": "master",
"connectedServiceId": "8f189c40-5897-4ada-9cce-df147f88fa57",
"isPrivate": "False",
"isFork": "True",
"ownerAvatarUrl": "https://avatars2.githubusercontent.com/u/872065?v=4",
"lastUpdated": "01/31/2019 17:04:58",
"nodeId": "MDEwOlJlcG9zaXRvcnkxNjE2OTgyMjc=",
"hasAdminPermissions": "True",
"safeOwnerId": "5848F3A00730FC1BBD623A19F26B0C66A2094333D2CF4382C9FD3483C02D5699D4A8932BB7FDE482121158ED9B8F66CE4A055D40DF47AB8C965FF7A9B8BA",
"safeRepository": "Azure-TDSP-DevOps-Template",
"ownerIsAUser": "True",
"checkoutNestedSubmodules": "false",
"cleanOptions": "0",
"fetchDepth": "0",
"gitLfsSupport": "false",
"reportBuildStatus": "true",
"skipSyncSource": "false",
"labelSourcesFormat": "$(build.buildNumber)",
"labelSources": "0"
},
"id": "user/Azure-TDSP-DevOps-Template",
"type": "GitHub",
"name": "user/Azure-TDSP-DevOps-Template",
"url": "https://github.com/user/Azure-TDSP-DevOps-Template.git",
"defaultBranch": "master",
"clean": "false",
"checkoutSubmodules": false
},
"processParameters": {},
"quality": 1,
"authoredBy": {
"displayName": "user",
"url": "https://spsprodweu3.vssps.visualstudio.com/Ae8b74699-309d-4bdc-b628-67015b7fd300/_apis/Identities/73d4d2b0-9cab-6c6a-aac6-551eaf5c4672",
"_links": {
"avatar": {
"href": "https://dev.azure.com/user/_apis/GraphProfile/MemberAvatars/aad.NzNkNGQyYjAtOWNhYi03YzZhLWFhYzYtNTUxZWFmNWM0Njcy"
}
},
"id": "73d4d2b0-9cab-6c6a-aac6-551eaf5c4672",
"uniqueName": "user@microsoft.com",
"imageUrl": "https://dev.azure.com/user/_api/_common/identityImage?id=73d4d2b0-9cab-6c6a-aac6-551eaf5c4672",
"descriptor": "aad.NzNkNGQyYjAtOWNhYi03YzZhLWFhYzYtNTUxZWFmNWM0Njcy"
},
"drafts": [],
"queue": {
"_links": {
"self": {
"href": "https://dev.azure.com/user/_apis/build/Queues/29"
}
},
"id": 29,
"name": "Hosted VS2017",
"url": "https://dev.azure.com/user/_apis/build/Queues/29",
"pool": {
"id": 4,
"name": "Hosted VS2017",
"isHosted": true
}
},
"id": 11,
"name": "Publish Resource Templates",
"url": "https://dev.azure.com/user/3ec38c18-3edc-45c6-9a06-33069ee6e54e/_apis/build/Definitions/11?revision=4",
"uri": "vstfs:///Build/Definition/11",
"path": "\\",
"type": 2,
"queueStatus": 0,
"revision": 4,
"createdDate": "2019-01-31T20:48:12.967Z",
"project": {
"id": "3ec38c18-3edc-45c6-9a06-33069ee6e54e",
"name": "DevOps for AI",
"url": "https://dev.azure.com/user/_apis/projects/3ec38c18-3edc-45c6-9a06-33069ee6e54e",
"state": 1,
"revision": 57,
"visibility": 0,
"lastUpdateTime": "2018-08-15T15:07:27.100Z"
}
}

View file

@@ -1,442 +0,0 @@
{
"options": [
{
"enabled": false,
"definition": {
"id": "5d58cc01-7c75-450c-be18-a388ddb129ec"
},
"inputs": {
"branchFilters": "[\"+refs/heads/*\"]",
"additionalFields": "{}"
}
},
{
"enabled": false,
"definition": {
"id": "a9db38f9-9fdc-478c-b0f9-464221e58316"
},
"inputs": {
"workItemType": "2418250",
"assignToRequestor": "true",
"additionalFields": "{}"
}
}
],
"triggers": [
{
"branchFilters": [
"+master"
],
"pathFilters": [],
"batchChanges": false,
"maxConcurrentBuildsPerBranch": 1,
"pollingInterval": 0,
"triggerType": 2
}
],
"variables": {
"AMLWorkspaceName": {
"value": ""
},
"ResourceGroupName": {
"value": ""
},
"system.debug": {
"value": "false",
"allowOverride": true
}
},
"variableGroups": [
{
"variables": {
"acrName": {
"value": "containerregistrydev"
},
"appInsightsName": {
"value": "appinsightsdev"
},
"azuremlwsname": {
"value": "mlworkspacedev"
},
"azureregion": {
"value": "westeurope"
},
"keyVaultName": {
"value": "keyvaultdev"
},
"rgname": {
"value": "devopsforaidev"
},
"storageAccountName": {
"value": "storagedev"
},
"subscriptionId": {
"value": "cf4e1704-b4bc-4554-bcd7-309394f2ee56"
}
},
"type": "Vsts",
"name": "dev",
"id": 3
}
],
"retentionRules": [
{
"branches": [
"+refs/heads/*"
],
"artifacts": [],
"artifactTypesToDelete": [
"FilePath",
"SymbolStore"
],
"daysToKeep": 10,
"minimumToKeep": 1,
"deleteBuildRecord": true,
"deleteTestResults": true
}
],
"properties": {},
"tags": [],
"_links": {
"self": {
"href": "https://dev.azure.com//3ec38c18-3edc-45c6-9a06-33069ee6e54e/_apis/build/Definitions/13?revision=62"
},
"web": {
"href": "https://dev.azure.com//3ec38c18-3edc-45c6-9a06-33069ee6e54e/_build/definition?definitionId=13"
},
"editor": {
"href": "https://dev.azure.com//3ec38c18-3edc-45c6-9a06-33069ee6e54e/_build/designer?id=13&_a=edit-build-definition"
},
"badge": {
"href": "https://dev.azure.com//3ec38c18-3edc-45c6-9a06-33069ee6e54e/_apis/build/status/13"
}
},
"jobAuthorizationScope": 1,
"jobTimeoutInMinutes": 60,
"jobCancelTimeoutInMinutes": 5,
"process": {
"phases": [
{
"steps": [
{
"environment": {},
"enabled": true,
"continueOnError": false,
"alwaysRun": false,
"displayName": "Create env config file",
"timeoutInMinutes": 0,
"condition": "succeeded()",
"task": {
"id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1",
"versionSpec": "2.*",
"definitionType": "task"
},
"inputs": {
"targetType": "inline",
"filePath": "",
"arguments": "",
"script": "Write-Host \"Writing config file\"\n\n@{ subscription_id=\"$(subscriptionId)\"; resource_group=\"$(rgname)\"; workspace_name=\"$(azuremlwsname)\"} | ConvertTo-Json -depth 100 | Out-File \"aml_config/config.json\"",
"errorActionPreference": "stop",
"failOnStderr": "false",
"ignoreLASTEXITCODE": "false",
"pwsh": "false",
"workingDirectory": ""
}
},
{
"environment": {},
"enabled": true,
"continueOnError": false,
"alwaysRun": false,
"displayName": "Conda: Create Environment",
"timeoutInMinutes": 0,
"condition": "succeeded()",
"task": {
"id": "03dd16c3-43e0-4667-ba84-40515d27a410",
"versionSpec": "1.*",
"definitionType": "task"
},
"inputs": {
"createCustomEnvironment": "true",
"environmentName": "project_environment",
"packageSpecs": "",
"updateConda": "false",
"installOptions": "",
"createOptions": "",
"cleanEnvironment": "false"
}
},
{
"environment": {},
"enabled": true,
"continueOnError": true,
"alwaysRun": false,
"displayName": "Conda: Prepare Env (using requirements)",
"timeoutInMinutes": 0,
"condition": "succeeded()",
"task": {
"id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9",
"versionSpec": "2.*",
"definitionType": "task"
},
"inputs": {
"script": "conda env list;\n\nconda env update -f ./conda_dependencies.yml",
"workingDirectory": "",
"failOnStderr": "false"
}
},
{
"environment": {},
"enabled": true,
"continueOnError": true,
"alwaysRun": false,
"displayName": "Unit tests (model code)",
"timeoutInMinutes": 0,
"condition": "succeeded()",
"task": {
"id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9",
"versionSpec": "2.*",
"definitionType": "task"
},
"inputs": {
"script": "pip install setuptools\npip install pytest\npython -m pytest \\\n -k \"not integration\" \\\n --junit-xml $(Build.BinariesDirectory)/unittest_report.xml",
"workingDirectory": "",
"failOnStderr": "false"
}
},
{
"environment": {},
"enabled": true,
"continueOnError": true,
"alwaysRun": false,
"displayName": "Code Quality (flake8 linting)",
"timeoutInMinutes": 0,
"condition": "succeeded()",
"task": {
"id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9",
"versionSpec": "2.*",
"definitionType": "task"
},
"inputs": {
"script": "pip install flake8\npip install flake8_formatter_junit_xml\nflake8 --format junit-xml --output-file $(Build.BinariesDirectory)/flake8_report.xml --exit-zero\n",
"workingDirectory": "",
"failOnStderr": "false"
}
},
{
"environment": {},
"enabled": true,
"continueOnError": false,
"alwaysRun": false,
"displayName": "Publish Unit Test Results",
"timeoutInMinutes": 0,
"condition": "succeeded()",
"task": {
"id": "0b0f01ed-7dde-43ff-9cbb-e48954daf9b1",
"versionSpec": "2.*",
"definitionType": "task"
},
"inputs": {
"testRunner": "JUnit",
"testResultsFiles": "$(Build.BinariesDirectory)/*_report.xml",
"searchFolder": "$(System.DefaultWorkingDirectory)/Code/Modeling",
"mergeTestResults": "true",
"failTaskOnFailedTests": "false",
"testRunTitle": "",
"platform": "",
"configuration": "",
"publishRunAttachments": "true"
}
},
{
"environment": {},
"enabled": true,
"continueOnError": false,
"alwaysRun": false,
"displayName": "ML Pipeline: Train Model",
"timeoutInMinutes": 0,
"condition": "succeeded()",
"task": {
"id": "46e4be58-730b-4389-8a2f-ea10b3e5e815",
"versionSpec": "1.*",
"definitionType": "task"
},
"inputs": {
"connectedServiceNameARM": "b324cb93-5a35-4117-bd1d-03a0438c86a3",
"scriptLocation": "inlineScript",
"scriptPath": "",
"inlineScript": "python Code/Modeling/pipelines/pipeline_train.py",
"args": "",
"addSpnToEnvironment": "false",
"useGlobalConfig": "false",
"cwd": "",
"failOnStandardError": "false"
}
},
{
"environment": {},
"enabled": true,
"continueOnError": true,
"alwaysRun": false,
"displayName": "Integration + model validation tests",
"timeoutInMinutes": 0,
"condition": "succeeded()",
"task": {
"id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9",
"versionSpec": "2.*",
"definitionType": "task"
},
"inputs": {
"script": "echo \"integration\"",
"workingDirectory": "",
"failOnStderr": "false"
}
},
{
"environment": {},
"enabled": true,
"continueOnError": false,
"alwaysRun": false,
"displayName": "Copy Model Training Artifacts",
"timeoutInMinutes": 0,
"condition": "succeeded()",
"task": {
"id": "5bfb729a-a7c8-4a78-a7c3-8d717bb7c13c",
"versionSpec": "2.*",
"definitionType": "task"
},
"inputs": {
"SourceFolder": "outputs",
"Contents": "**",
"TargetFolder": "Code/Modeling/buildartifacts",
"CleanTargetFolder": "false",
"OverWrite": "false",
"flattenFolders": "false"
}
},
{
"environment": {},
"enabled": true,
"continueOnError": false,
"alwaysRun": false,
"displayName": "Publish Model Artifacts",
"timeoutInMinutes": 0,
"condition": "succeeded()",
"task": {
"id": "2ff763a7-ce83-4e1f-bc89-0ae63477cebe",
"versionSpec": "1.*",
"definitionType": "task"
},
"inputs": {
"PathtoPublish": "Code/Modeling",
"ArtifactName": "modelartifacts",
"ArtifactType": "Container",
"TargetPath": "",
"Parallel": "false",
"ParallelCount": "8"
}
}
],
"name": "Agent Phase",
"refName": "Phase_2",
"condition": "succeeded()",
"target": {
"executionOptions": {
"type": 0
},
"allowScriptsAuthAccessOption": false,
"type": 1
},
"jobAuthorizationScope": 1,
"jobCancelTimeoutInMinutes": 1
}
],
"type": 1
},
"repository": {
"properties": {
"apiUrl": "https://api.github.com/repos/user/Azure-TDSP-DevOps-Template",
"branchesUrl": "https://api.github.com/repos/user/Azure-TDSP-DevOps-Template/branches",
"cloneUrl": "https://github.com/user/Azure-TDSP-DevOps-Template.git",
"fullName": "user/Azure-TDSP-DevOps-Template",
"manageUrl": "https://github.com/user/Azure-TDSP-DevOps-Template",
"refsUrl": "https://api.github.com/repos/user/Azure-TDSP-DevOps-Template/git/refs",
"defaultBranch": "master",
"connectedServiceId": "8f189c40-5897-4ada-9cce-df147f88fa57",
"isPrivate": "False",
"isFork": "True",
"ownerAvatarUrl": "https://avatars2.githubusercontent.com/u/872065?v=4",
"lastUpdated": "02/09/2019 21:51:45",
"nodeId": "MDEwOlJlcG9zaXRvcnkxNjE2OTgyMjc=",
"hasAdminPermissions": "True",
"safeOwnerId": "5848F3A00730FC1BBD623A19F26B0C66A2094333D2CF4382C9FD3483C02D5699D4A8932BB7FDE482121158ED9B8F66CE4A055D40DF47AB8C965FF7A9B8BA",
"ownerId": "872065",
"safeRepository": "Azure-TDSP-DevOps-Template",
"ownerIsAUser": "True",
"checkoutNestedSubmodules": "false",
"cleanOptions": "0",
"fetchDepth": "0",
"gitLfsSupport": "false",
"reportBuildStatus": "true",
"skipSyncSource": "false",
"labelSourcesFormat": "$(build.buildNumber)",
"labelSources": "0"
},
"id": "user/Azure-TDSP-DevOps-Template",
"type": "GitHub",
"name": "user/Azure-TDSP-DevOps-Template",
"url": "https://github.com/user/Azure-TDSP-DevOps-Template.git",
"defaultBranch": "master",
"clean": "false",
"checkoutSubmodules": false
},
"processParameters": {},
"quality": 1,
"authoredBy": {
"displayName": "user",
"url": "https://spsprodweu3.vssps.visualstudio.com/Ae8b74699-309d-4bdc-b628-67015b7fd300/_apis/Identities/73d4d2b0-9cab-6c6a-aac6-551eaf5c4672",
"_links": {
"avatar": {
"href": "https://dev.azure.com//_apis/GraphProfile/MemberAvatars/aad.NzNkNGQyYjAtOWNhYi03YzZhLWFhYzYtNTUxZWFmNWM0Njcy"
}
},
"id": "73d4d2b0-9cab-6c6a-aac6-551eaf5c4672",
"uniqueName": "@microsoft.com",
"imageUrl": "https://dev.azure.com//_api/_common/identityImage?id=73d4d2b0-9cab-6c6a-aac6-551eaf5c4672",
"descriptor": "aad.NzNkNGQyYjAtOWNhYi03YzZhLWFhYzYtNTUxZWFmNWM0Njcy"
},
"drafts": [],
"queue": {
"_links": {
"self": {
"href": "https://dev.azure.com//_apis/build/Queues/35"
}
},
"id": 35,
"name": "Hosted Ubuntu 1604",
"url": "https://dev.azure.com//_apis/build/Queues/35",
"pool": {
"id": 6,
"name": "Hosted Ubuntu 1604",
"isHosted": true
}
},
"id": 13,
"name": "Build Model",
"url": "https://dev.azure.com//3ec38c18-3edc-45c6-9a06-33069ee6e54e/_apis/build/Definitions/13?revision=62",
"uri": "vstfs:///Build/Definition/13",
"path": "\\",
"type": 2,
"queueStatus": 0,
"revision": 62,
"createdDate": "2019-02-10T20:19:04.293Z",
"project": {
"id": "3ec38c18-3edc-45c6-9a06-33069ee6e54e",
"name": "DevOps for AI",
"url": "https://dev.azure.com//_apis/projects/3ec38c18-3edc-45c6-9a06-33069ee6e54e",
"state": 1,
"revision": 57,
"visibility": 0,
"lastUpdateTime": "2018-08-15T15:07:27.100Z"
}
}

View file

@@ -1,515 +0,0 @@
{
"source": 2,
"revision": 108,
"description": null,
"createdBy": {
"displayName": "",
"url": "https://spsprodweu3.vssps.visualstudio.com/Ae8b74699-309d-4bdc-b628-67015b7fd300/_apis/Identities/73d4d2b0-9cab-6c6a-aac6-551eaf5c4672",
"_links": {
"avatar": {
"href": "https://dev.azure.com/account/_apis/GraphProfile/MemberAvatars/aad.NzNkNGQyYjAtOWNhYi03YzZhLWFhYzYtNTUxZWFmNWM0Njcy"
}
},
"id": "73d4d2b0-9cab-6c6a-aac6-551eaf5c4672",
"uniqueName": "account@microsoft.com",
"imageUrl": "https://dev.azure.com/account/_api/_common/identityImage?id=73d4d2b0-9cab-6c6a-aac6-551eaf5c4672",
"descriptor": "aad.NzNkNGQyYjAtOWNhYi03YzZhLWFhYzYtNTUxZWFmNWM0Njcy"
},
"createdOn": "2019-01-24T09:38:09.313Z",
"modifiedBy": {
"displayName": "user",
"url": "https://spsprodweu3.vssps.visualstudio.com/Ae8b74699-309d-4bdc-b628-67015b7fd300/_apis/Identities/73d4d2b0-9cab-6c6a-aac6-551eaf5c4672",
"_links": {
"avatar": {
"href": "https://dev.azure.com/account/_apis/GraphProfile/MemberAvatars/aad.NzNkNGQyYjAtOWNhYi03YzZhLWFhYzYtNTUxZWFmNWM0Njcy"
}
},
"id": "73d4d2b0-9cab-6c6a-aac6-551eaf5c4672",
"uniqueName": "account@microsoft.com",
"imageUrl": "https://dev.azure.com/account/_api/_common/identityImage?id=73d4d2b0-9cab-6c6a-aac6-551eaf5c4672",
"descriptor": "aad.NzNkNGQyYjAtOWNhYi03YzZhLWFhYzYtNTUxZWFmNWM0Njcy"
},
"modifiedOn": "2019-02-07T15:25:50.063Z",
"isDeleted": false,
"variables": {},
"variableGroups": [],
"environments": [
{
"id": 4,
"name": "Dev",
"rank": 1,
"owner": {
"displayName": "Name",
"url": "https://spsprodweu3.vssps.visualstudio.com/Ae8b74699-309d-4bdc-b628-67015b7fd300/_apis/Identities/73d4d2b0-9cab-6c6a-aac6-551eaf5c4672",
"_links": {
"avatar": {
"href": "https://dev.azure.com/account/_apis/GraphProfile/MemberAvatars/aad.NzNkNGQyYjAtOWNhYi03YzZhLWFhYzYtNTUxZWFmNWM0Njcy"
}
},
"id": "73d4d2b0-9cab-6c6a-aac6-551eaf5c4672",
"uniqueName": "account@microsoft.com",
"imageUrl": "https://dev.azure.com/account/_api/_common/identityImage?id=73d4d2b0-9cab-6c6a-aac6-551eaf5c4672",
"descriptor": "aad.NzNkNGQyYjAtOWNhYi03YzZhLWFhYzYtNTUxZWFmNWM0Njcy"
},
"variables": {},
"variableGroups": [
3
],
"preDeployApprovals": {
"approvals": [
{
"rank": 1,
"isAutomated": true,
"isNotificationOn": false,
"id": 18
}
],
"approvalOptions": {
"requiredApproverCount": null,
"releaseCreatorCanBeApprover": false,
"autoTriggeredAndPreviousEnvironmentApprovedCanBeSkipped": false,
"enforceIdentityRevalidation": false,
"timeoutInMinutes": 0,
"executionOrder": 1
}
},
"deployStep": {
"id": 19
},
"postDeployApprovals": {
"approvals": [
{
"rank": 1,
"isAutomated": true,
"isNotificationOn": false,
"id": 20
}
],
"approvalOptions": {
"requiredApproverCount": null,
"releaseCreatorCanBeApprover": false,
"autoTriggeredAndPreviousEnvironmentApprovedCanBeSkipped": false,
"enforceIdentityRevalidation": false,
"timeoutInMinutes": 0,
"executionOrder": 2
}
},
"deployPhases": [
{
"deploymentInput": {
"parallelExecution": {
"parallelExecutionType": 0
},
"skipArtifactsDownload": false,
"artifactsDownloadInput": {
"downloadInputs": [
{
"artifactItems": [],
"alias": "resourcetemplates",
"artifactType": "Build",
"artifactDownloadMode": "All"
}
]
},
"queueId": 35,
"demands": [],
"enableAccessToken": false,
"timeoutInMinutes": 0,
"jobCancelTimeoutInMinutes": 1,
"condition": "succeeded()",
"overrideInputs": {}
},
"rank": 1,
"phaseType": 1,
"name": "Agent job",
"refName": null,
"workflowTasks": [
{
"environment": {},
"taskId": "94a74903-f93f-4075-884f-dc11f34058b4",
"version": "2.*",
"name": "Azure Deployment: Application Insights",
"refName": "",
"enabled": true,
"alwaysRun": false,
"continueOnError": false,
"timeoutInMinutes": 0,
"definitionType": "task",
"overrideInputs": {},
"condition": "succeeded()",
"inputs": {
"ConnectedServiceName": "b324cb93-5a35-4117-bd1d-03a0438c86a3",
"action": "Create Or Update Resource Group",
"resourceGroupName": "$(rgname)",
"location": "$(azureregion)",
"templateLocation": "Linked artifact",
"csmFileLink": "",
"csmParametersFileLink": "",
"csmFile": "$(System.DefaultWorkingDirectory)/resourcetemplates/resourcetemplates/appinsights/template.json",
"csmParametersFile": "$(System.DefaultWorkingDirectory)/resourcetemplates/resourcetemplates/appinsights/parameters.json",
"overrideParameters": "-name $(appInsightsName) -type \"other\" -regionId $(azureregion) -requestSource \"IbizaAIExtension\"",
"deploymentMode": "Incremental",
"enableDeploymentPrerequisites": "None",
"deploymentGroupEndpoint": "",
"project": "",
"deploymentGroupName": "",
"copyAzureVMTags": "true",
"runAgentServiceAsUser": "false",
"userName": "",
"password": "",
"outputVariable": "",
"deploymentName": "",
"deploymentOutputs": "AppInsightsOutput"
}
},
{
"environment": {},
"taskId": "94a74903-f93f-4075-884f-dc11f34058b4",
"version": "2.*",
"name": "Azure Deployment: Storage",
"refName": "",
"enabled": true,
"alwaysRun": false,
"continueOnError": false,
"timeoutInMinutes": 0,
"definitionType": "task",
"overrideInputs": {},
"condition": "succeeded()",
"inputs": {
"ConnectedServiceName": "b324cb93-5a35-4117-bd1d-03a0438c86a3",
"action": "Create Or Update Resource Group",
"resourceGroupName": "$(rgname)",
"location": "$(azureregion)",
"templateLocation": "Linked artifact",
"csmFileLink": "",
"csmParametersFileLink": "",
"csmFile": "$(System.DefaultWorkingDirectory)/resourcetemplates/resourcetemplates/storage/template.json",
"csmParametersFile": "$(System.DefaultWorkingDirectory)/resourcetemplates/resourcetemplates/storage/parameters.json",
"overrideParameters": "-location $(azureRegion) -storageAccountName $(storageAccountName) -accountType \"Standard_RAGRS\" -kind \"StorageV2\" -accessTier \"Hot\" -supportsHttpsTrafficOnly true",
"deploymentMode": "Incremental",
"enableDeploymentPrerequisites": "None",
"deploymentGroupEndpoint": "",
"project": "",
"deploymentGroupName": "",
"copyAzureVMTags": "true",
"runAgentServiceAsUser": "false",
"userName": "",
"password": "",
"outputVariable": "",
"deploymentName": "",
"deploymentOutputs": "StorageOutput"
}
},
{
"environment": {},
"taskId": "94a74903-f93f-4075-884f-dc11f34058b4",
"version": "2.*",
"name": "Azure Deployment: Key Vault",
"refName": "",
"enabled": true,
"alwaysRun": false,
"continueOnError": false,
"timeoutInMinutes": 0,
"definitionType": "task",
"overrideInputs": {},
"condition": "succeeded()",
"inputs": {
"ConnectedServiceName": "b324cb93-5a35-4117-bd1d-03a0438c86a3",
"action": "Create Or Update Resource Group",
"resourceGroupName": "$(rgname)",
"location": "$(azureregion)",
"templateLocation": "Linked artifact",
"csmFileLink": "",
"csmParametersFileLink": "",
"csmFile": "$(System.DefaultWorkingDirectory)/resourcetemplates/resourcetemplates/keyvault/template.json",
"csmParametersFile": "$(System.DefaultWorkingDirectory)/resourcetemplates/resourcetemplates/keyvault/parameters.json",
"overrideParameters": "-name $(keyVaultName) -location $(azureregion) -tenantId \"72f988bf-86f1-41af-91ab-2d7cd011db47\" -objectId \"357e0b82-f4db-4e11-b279-654fd1b93629\"",
"deploymentMode": "Incremental",
"enableDeploymentPrerequisites": "None",
"deploymentGroupEndpoint": "",
"project": "",
"deploymentGroupName": "",
"copyAzureVMTags": "true",
"runAgentServiceAsUser": "false",
"userName": "",
"password": "",
"outputVariable": "",
"deploymentName": "",
"deploymentOutputs": "KeyVaultOutput"
}
},
{
"environment": {},
"taskId": "94a74903-f93f-4075-884f-dc11f34058b4",
"version": "2.*",
"name": "Azure Deployment: Container Registry",
"refName": "",
"enabled": true,
"alwaysRun": false,
"continueOnError": false,
"timeoutInMinutes": 0,
"definitionType": "task",
"overrideInputs": {},
"condition": "succeeded()",
"inputs": {
"ConnectedServiceName": "b324cb93-5a35-4117-bd1d-03a0438c86a3",
"action": "Create Or Update Resource Group",
"resourceGroupName": "$(rgname)",
"location": "$(azureregion)",
"templateLocation": "Linked artifact",
"csmFileLink": "",
"csmParametersFileLink": "",
"csmFile": "$(System.DefaultWorkingDirectory)/resourcetemplates/resourcetemplates/containerregistry/template.json",
"csmParametersFile": "$(System.DefaultWorkingDirectory)/resourcetemplates/resourcetemplates/containerregistry/parameters.json",
"overrideParameters": "-name $(acrName) -location $(azureregion)",
"deploymentMode": "Incremental",
"enableDeploymentPrerequisites": "None",
"deploymentGroupEndpoint": "",
"project": "",
"deploymentGroupName": "",
"copyAzureVMTags": "true",
"runAgentServiceAsUser": "false",
"userName": "",
"password": "",
"outputVariable": "",
"deploymentName": "",
"deploymentOutputs": "ContainerRegistryOutput"
}
},
{
"environment": {},
"taskId": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1",
"version": "2.*",
"name": "Parse outputs: collect resource IDs",
"refName": "",
"enabled": true,
"alwaysRun": false,
"continueOnError": false,
"timeoutInMinutes": 0,
"definitionType": "task",
"overrideInputs": {},
"condition": "succeeded()",
"inputs": {
"targetType": "inline",
"filePath": "$(System.DefaultWorkingDirectory)/resourcetemplates/resourcetemplates/parseresourceids.ps1",
"arguments": "",
"script": "# parse ARM deployment json output\n$appInsights = '$(AppInsightsOutput)' | ConvertFrom-Json\n$storage = '$(StorageOutput)' | ConvertFrom-Json\n$keyvault = '$(KeyVaultOutput)' | ConvertFrom-Json\n$acr = '$(ContainerRegistryOutput)' | ConvertFrom-Json\n\nWrite-Output \"Parsed JSON\"\n\n# save temporary variable\n$appInsightsId = $appInsights.resourceId.value\n$storageId = $storage.resourceId.value\n$keyvaultId = $keyvault.resourceId.value\n$acrId = $acr.resourceId.value\n\n# export Azure DevOps Pipelines variables\nWrite-Host \"Writing resource ids as variables\"\nWrite-Host \"##vso[task.setvariable variable=appInsightsId]$appInsightsId\"\nWrite-Host \"##vso[task.setvariable variable=storageId]$storageId\"\nWrite-Host \"##vso[task.setvariable variable=keyvaultId]$keyvaultId\"\nWrite-Host \"##vso[task.setvariable variable=acrId]$acrId\"",
"errorActionPreference": "stop",
"failOnStderr": "false",
"ignoreLASTEXITCODE": "false",
"pwsh": "false",
"workingDirectory": ""
}
},
{
"environment": {},
"taskId": "94a74903-f93f-4075-884f-dc11f34058b4",
"version": "2.*",
"name": "Azure Deployment: ML Workspace",
"refName": "",
"enabled": true,
"alwaysRun": false,
"continueOnError": false,
"timeoutInMinutes": 0,
"definitionType": "task",
"overrideInputs": {},
"condition": "succeeded()",
"inputs": {
"ConnectedServiceName": "b324cb93-5a35-4117-bd1d-03a0438c86a3",
"action": "Create Or Update Resource Group",
"resourceGroupName": "$(rgname)",
"location": "$(azureregion)",
"templateLocation": "Linked artifact",
"csmFileLink": "",
"csmParametersFileLink": "",
"csmFile": "$(System.DefaultWorkingDirectory)/resourcetemplates/resourcetemplates/mlworkspace/template.json",
"csmParametersFile": "$(System.DefaultWorkingDirectory)/resourcetemplates/resourcetemplates/mlworkspace/parameters.json",
"overrideParameters": "-machineLearningApiVersion \"2018-11-19\" -description \"\" -friendlyName \"\" -location $(azureregion) -name $(azuremlwsname) -storageAccountId $(storageId) -appInsightsId $(appInsightsId) -containerRegistryId $(acrId) -keyVaultId $(keyvaultId)",
"deploymentMode": "Incremental",
"enableDeploymentPrerequisites": "None",
"deploymentGroupEndpoint": "",
"project": "",
"deploymentGroupName": "",
"copyAzureVMTags": "true",
"runAgentServiceAsUser": "false",
"userName": "",
"password": "",
"outputVariable": "",
"deploymentName": "",
"deploymentOutputs": "workspace"
}
},
{
"environment": {},
"taskId": "94a74903-f93f-4075-884f-dc11f34058b4",
"version": "2.*",
"name": "Azure Deployment: ML Compute",
"refName": "",
"enabled": true,
"alwaysRun": false,
"continueOnError": false,
"timeoutInMinutes": 0,
"definitionType": "task",
"overrideInputs": {},
"condition": "succeeded()",
"inputs": {
"ConnectedServiceName": "b324cb93-5a35-4117-bd1d-03a0438c86a3",
"action": "Create Or Update Resource Group",
"resourceGroupName": "$(rgname)",
"location": "$(azureregion)",
"templateLocation": "Linked artifact",
"csmFileLink": "",
"csmParametersFileLink": "",
"csmFile": "$(System.DefaultWorkingDirectory)/resourcetemplates/resourcetemplates/mlcompute/template.json",
"csmParametersFile": "$(System.DefaultWorkingDirectory)/resourcetemplates/resourcetemplates/mlcompute/parameters.json",
"overrideParameters": "-workspaceName $(azuremlwsname) -clusterName \"dsvmcluster\" -minNodeCount 0 -maxNodeCount 1 -workspacelocation $(azureregion) -adminUserName \"account\" -adminUserPassword \"tdspTemplate1!\" -vmSize \"Standard_D3_V2\"",
"deploymentMode": "Incremental",
"enableDeploymentPrerequisites": "None",
"deploymentGroupEndpoint": "",
"project": "",
"deploymentGroupName": "",
"copyAzureVMTags": "true",
"runAgentServiceAsUser": "false",
"userName": "",
"password": "",
"outputVariable": "",
"deploymentName": "",
"deploymentOutputs": "workspace"
}
}
]
}
],
"environmentOptions": {
"emailNotificationType": "OnlyOnFailure",
"emailRecipients": "release.environment.owner;release.creator",
"skipArtifactsDownload": false,
"timeoutInMinutes": 0,
"enableAccessToken": false,
"publishDeploymentStatus": true,
"badgeEnabled": false,
"autoLinkWorkItems": false,
"pullRequestDeploymentEnabled": false
},
"demands": [],
"conditions": [
{
"name": "ReleaseStarted",
"conditionType": 1,
"value": ""
}
],
"executionPolicy": {
"concurrencyCount": 1,
"queueDepthCount": 0
},
"schedules": [],
"currentRelease": {
"id": 174,
"url": "https://vsrm.dev.azure.com/account/12345678/_apis/Release/releases/174",
"_links": {}
},
"retentionPolicy": {
"daysToKeep": 30,
"releasesToKeep": 3,
"retainBuild": true
},
"processParameters": {},
"properties": {},
"preDeploymentGates": {
"id": 0,
"gatesOptions": null,
"gates": []
},
"postDeploymentGates": {
"id": 0,
"gatesOptions": null,
"gates": []
},
"environmentTriggers": [],
"badgeUrl": "https://vsrm.dev.azure.com/account/_apis/public/Release/badge/12345678/2/4"
}
],
"artifacts": [
{
"sourceId": "12345678:11",
"type": "Build",
"alias": "resourcetemplates",
"definitionReference": {
"artifactSourceDefinitionUrl": {
"id": "https://dev.azure.com/account/_permalink/_build/index?collectionId=d7938a7e-2dbb-412d-bce1-3608e05e59c0&projectId=12345678&definitionId=11",
"name": ""
},
"defaultVersionBranch": {
"id": "",
"name": ""
},
"defaultVersionSpecific": {
"id": "",
"name": ""
},
"defaultVersionTags": {
"id": "",
"name": ""
},
"defaultVersionType": {
"id": "selectDuringReleaseCreationType",
"name": "Specify at the time of release creation"
},
"definition": {
"id": "11",
"name": "Publish Resource Templates"
},
"definitions": {
"id": "",
"name": ""
},
"IsMultiDefinitionType": {
"id": "False",
"name": "False"
},
"project": {
"id": "12345678",
"name": "DevOps for AI"
},
"repository": {
"id": "b5f121ca-e7e2-4e25-9e63-636960663e94",
"name": "DevOpsforAI-template"
}
},
"isPrimary": true,
"isRetained": false
}
],
"triggers": [
{
"artifactAlias": "resourcetemplates",
"triggerConditions": [],
"triggerType": 1
}
],
"releaseNameFormat": "Release-$(rev:r)",
"tags": [],
"pipelineProcess": {
"type": 1
},
"properties": {
"DefinitionCreationSource": {
"$type": "System.String",
"$value": "Other"
}
},
"id": 2,
"name": "Provision Resources",
"path": "\\",
"projectReference": null,
"url": "https://vsrm.dev.azure.com/account/12345678/_apis/Release/definitions/2",
"_links": {
"self": {
"href": "https://vsrm.dev.azure.com/account/12345678/_apis/Release/definitions/2"
},
"web": {
"href": "https://dev.azure.com/account/12345678/_release?definitionId=2"
}
}
}
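
The two "Azure Deployment" tasks in the definition above provision the ML workspace and a small compute cluster from linked ARM templates, reusing the resource ids exported by the inline PowerShell step. For orientation only, here is a rough, hypothetical sketch of the same provisioning done with the azureml-core Python SDK; the subscription id, resource group, workspace name and region are placeholders (only the cluster name, node counts and VM size mirror the ML Compute task parameters above), and this sketch is not part of the release definition itself.

# Hypothetical sketch (not part of this release definition): provision the
# ML workspace and compute cluster with the azureml-core SDK instead of the
# ARM template tasks. Subscription/resource group/workspace/region values
# below are placeholders; cluster settings mirror the ML Compute task.
from azureml.core import Workspace
from azureml.core.compute import AmlCompute, ComputeTarget
from azureml.core.authentication import AzureCliAuthentication

cli_auth = AzureCliAuthentication()

# Create (or reuse) the workspace; exist_ok=True makes the call idempotent
ws = Workspace.create(
    name='<workspace-name>',              # placeholder, cf. $(azuremlwsname)
    subscription_id='<subscription-id>',  # placeholder
    resource_group='<resource-group>',    # placeholder, cf. $(rgname)
    location='<azure-region>',            # placeholder, cf. $(azureregion)
    exist_ok=True,
    auth=cli_auth
)

# Autoscaling cluster with 0..1 nodes, as in the ML Compute task above
compute_config = AmlCompute.provisioning_configuration(
    vm_size='Standard_D3_V2',
    min_nodes=0,
    max_nodes=1
)
cluster = ComputeTarget.create(ws, 'dsvmcluster', compute_config)
cluster.wait_for_completion(show_output=True)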

Просмотреть файл

@ -1,5 +0,0 @@
# @TODO
# Deserialize training pipeline
# Run training pipeline
# Deserialize scoring pipeline
# Deploy scoring pipeline
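
The removed placeholder above only named the orchestration steps. As a loose illustration of what they could look like with the Azure ML Python SDK, here is a hypothetical sketch; the pipeline id, model name, scoring script, environment and service name are invented placeholders, and the original TODO may equally have meant a published batch scoring pipeline rather than a real-time service.

# Hypothetical sketch of the four TODO steps above; all ids and names are
# placeholders, not values from this repository.
from azureml.core import Workspace, Environment
from azureml.core.model import Model, InferenceConfig
from azureml.core.webservice import AciWebservice
from azureml.pipeline.core import PublishedPipeline
from azureml.core.authentication import AzureCliAuthentication

ws = Workspace.from_config(auth=AzureCliAuthentication())

# "Deserialize" (look up) the published training pipeline and run it
training_pipeline = PublishedPipeline.get(workspace=ws, id='<training-pipeline-id>')
training_run = training_pipeline.submit(ws, experiment_name='training')
training_run.wait_for_completion(show_output=True)

# Deploy a scoring service for the model registered by the training run
model = Model(ws, name='<registered-model-name>')
env = Environment.get(ws, name='AzureML-Minimal')  # any registered environment
inference_config = InferenceConfig(entry_script='score.py', environment=env)
aci_config = AciWebservice.deploy_configuration(cpu_cores=1, memory_gb=1)
service = Model.deploy(ws, 'scoring-service', [model], inference_config, aci_config)
service.wait_for_deployment(show_output=True)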

Просмотреть файл

@ -1 +0,0 @@
# @TODO

Просмотреть файл

@ -1,2 +0,0 @@
# Deployment templates #

Просмотреть файл

@ -1,115 +0,0 @@
// Requires the following Azure NuGet packages and related dependencies:
// package id="Microsoft.Azure.Management.Authorization" version="2.0.0"
// package id="Microsoft.Azure.Management.ResourceManager" version="1.4.0-preview"
// package id="Microsoft.Rest.ClientRuntime.Azure.Authentication" version="2.2.8-preview"
using Microsoft.Azure.Management.ResourceManager;
using Microsoft.Azure.Management.ResourceManager.Models;
using Microsoft.Rest.Azure.Authentication;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using System;
using System.IO;
namespace PortalGenerated
{
/// <summary>
/// This is a helper class for deploying an Azure Resource Manager template
/// More info about template deployments can be found here https://go.microsoft.com/fwLink/?LinkID=733371
/// </summary>
class DeploymentHelper
{
string subscriptionId = "your-subscription-id";
string clientId = "your-service-principal-clientId";
string clientSecret = "your-service-principal-client-secret";
string resourceGroupName = "resource-group-name";
string deploymentName = "deployment-name";
string resourceGroupLocation = "resource-group-location"; // must be specified for creating a new resource group
string pathToTemplateFile = "path-to-template.json-on-disk";
string pathToParameterFile = "path-to-parameters.json-on-disk";
string tenantId = "tenant-id";
public async void Run()
{
// Try to obtain the service credentials
var serviceCreds = await ApplicationTokenProvider.LoginSilentAsync(tenantId, clientId, clientSecret);
// Read the template and parameter file contents
JObject templateFileContents = GetJsonFileContents(pathToTemplateFile);
JObject parameterFileContents = GetJsonFileContents(pathToParameterFile);
// Create the resource manager client
var resourceManagementClient = new ResourceManagementClient(serviceCreds);
resourceManagementClient.SubscriptionId = subscriptionId;
// Create or check that resource group exists
EnsureResourceGroupExists(resourceManagementClient, resourceGroupName, resourceGroupLocation);
// Start a deployment
DeployTemplate(resourceManagementClient, resourceGroupName, deploymentName, templateFileContents, parameterFileContents);
}
/// <summary>
/// Reads a JSON file from the specified path
/// </summary>
/// <param name="pathToJson">The full path to the JSON file</param>
/// <returns>The JSON file contents</returns>
private JObject GetJsonFileContents(string pathToJson)
{
JObject templatefileContent = new JObject();
using (StreamReader file = File.OpenText(pathToJson))
{
using (JsonTextReader reader = new JsonTextReader(file))
{
templatefileContent = (JObject)JToken.ReadFrom(reader);
return templatefileContent;
}
}
}
/// <summary>
/// Ensures that a resource group with the specified name exists. If it does not, will attempt to create one.
/// </summary>
/// <param name="resourceManagementClient">The resource manager client.</param>
/// <param name="resourceGroupName">The name of the resource group.</param>
/// <param name="resourceGroupLocation">The resource group location. Required when creating a new resource group.</param>
private static void EnsureResourceGroupExists(ResourceManagementClient resourceManagementClient, string resourceGroupName, string resourceGroupLocation)
{
if (resourceManagementClient.ResourceGroups.CheckExistence(resourceGroupName) != true)
{
Console.WriteLine(string.Format("Creating resource group '{0}' in location '{1}'", resourceGroupName, resourceGroupLocation));
var resourceGroup = new ResourceGroup();
resourceGroup.Location = resourceGroupLocation;
resourceManagementClient.ResourceGroups.CreateOrUpdate(resourceGroupName, resourceGroup);
}
else
{
Console.WriteLine(string.Format("Using existing resource group '{0}'", resourceGroupName));
}
}
/// <summary>
/// Starts a template deployment.
/// </summary>
/// <param name="resourceManagementClient">The resource manager client.</param>
/// <param name="resourceGroupName">The name of the resource group.</param>
/// <param name="deploymentName">The name of the deployment.</param>
/// <param name="templateFileContents">The template file contents.</param>
/// <param name="parameterFileContents">The parameter file contents.</param>
private static void DeployTemplate(ResourceManagementClient resourceManagementClient, string resourceGroupName, string deploymentName, JObject templateFileContents, JObject parameterFileContents)
{
Console.WriteLine(string.Format("Starting template deployment '{0}' in resource group '{1}'", deploymentName, resourceGroupName));
var deployment = new Deployment();
deployment.Properties = new DeploymentProperties
{
Mode = DeploymentMode.Incremental,
Template = templateFileContents,
Parameters = parameterFileContents["parameters"].ToObject<JObject>()
};
var deploymentResult = resourceManagementClient.Deployments.CreateOrUpdate(resourceGroupName, deploymentName, deployment);
Console.WriteLine(string.Format("Deployment status: {0}", deploymentResult.Properties.ProvisioningState));
}
}
}

Просмотреть файл

@ -1,107 +0,0 @@
<#
.SYNOPSIS
Deploys a template to Azure
.DESCRIPTION
Deploys an Azure Resource Manager template
.PARAMETER subscriptionId
The subscription id where the template will be deployed.
.PARAMETER resourceGroupName
The resource group where the template will be deployed. Can be the name of an existing or a new resource group.
.PARAMETER resourceGroupLocation
Optional, a resource group location. If specified, will try to create a new resource group in this location. If not specified, assumes resource group is existing.
.PARAMETER deploymentName
The deployment name.
.PARAMETER templateFilePath
Optional, path to the template file. Defaults to template.json.
.PARAMETER parametersFilePath
Optional, path to the parameters file. Defaults to parameters.json. If file is not found, will prompt for parameter values based on template.
#>
param(
[Parameter(Mandatory=$True)]
[string]
$subscriptionId,
[Parameter(Mandatory=$True)]
[string]
$resourceGroupName,
[string]
$resourceGroupLocation,
[Parameter(Mandatory=$True)]
[string]
$deploymentName,
[string]
$templateFilePath = "template.json",
[string]
$parametersFilePath = "parameters.json"
)
<#
.SYNOPSIS
Registers RPs
#>
Function RegisterRP {
Param(
[string]$ResourceProviderNamespace
)
Write-Host "Registering resource provider '$ResourceProviderNamespace'";
Register-AzureRmResourceProvider -ProviderNamespace $ResourceProviderNamespace;
}
#******************************************************************************
# Script body
# Execution begins here
#******************************************************************************
$ErrorActionPreference = "Stop"
# sign in
Write-Host "Logging in...";
Login-AzureRmAccount;
# select subscription
Write-Host "Selecting subscription '$subscriptionId'";
Select-AzureRmSubscription -SubscriptionID $subscriptionId;
# Register RPs
$resourceProviders = @("microsoft.insights");
if($resourceProviders.length) {
Write-Host "Registering resource providers"
foreach($resourceProvider in $resourceProviders) {
RegisterRP($resourceProvider);
}
}
#Create or check for existing resource group
$resourceGroup = Get-AzureRmResourceGroup -Name $resourceGroupName -ErrorAction SilentlyContinue
if(!$resourceGroup)
{
Write-Host "Resource group '$resourceGroupName' does not exist. To create a new resource group, please enter a location.";
if(!$resourceGroupLocation) {
$resourceGroupLocation = Read-Host "resourceGroupLocation";
}
Write-Host "Creating resource group '$resourceGroupName' in location '$resourceGroupLocation'";
New-AzureRmResourceGroup -Name $resourceGroupName -Location $resourceGroupLocation
}
else{
Write-Host "Using existing resource group '$resourceGroupName'";
}
# Start the deployment
Write-Host "Starting deployment...";
if(Test-Path $parametersFilePath) {
New-AzureRmResourceGroupDeployment -ResourceGroupName $resourceGroupName -Name $deploymentName -TemplateFile $templateFilePath -TemplateParameterFile $parametersFilePath;
} else {
New-AzureRmResourceGroupDeployment -ResourceGroupName $resourceGroupName -Name $deploymentName -TemplateFile $templateFilePath;
}

Просмотреть файл

@ -1,122 +0,0 @@
#!/bin/bash
set -euo pipefail
IFS=$'\n\t'
# -e: immediately exit if any command has a non-zero exit status
# -o: prevents errors in a pipeline from being masked
# IFS new value is less likely to cause confusing bugs when looping arrays or arguments (e.g. $@)
usage() { echo "Usage: $0 -i <subscriptionId> -g <resourceGroupName> -n <deploymentName> -l <resourceGroupLocation>" 1>&2; exit 1; }
declare subscriptionId=""
declare resourceGroupName=""
declare deploymentName=""
declare resourceGroupLocation=""
# Initialize parameters specified from command line
while getopts ":i:g:n:l:" arg; do
case "${arg}" in
i)
subscriptionId=${OPTARG}
;;
g)
resourceGroupName=${OPTARG}
;;
n)
deploymentName=${OPTARG}
;;
l)
resourceGroupLocation=${OPTARG}
;;
esac
done
shift $((OPTIND-1))
#Prompt for parameters if some required parameters are missing
if [[ -z "$subscriptionId" ]]; then
echo "Your subscription ID can be looked up with the CLI using: az account show --out json "
echo "Enter your subscription ID:"
read subscriptionId
[[ "${subscriptionId:?}" ]]
fi
if [[ -z "$resourceGroupName" ]]; then
echo "This script will look for an existing resource group, otherwise a new one will be created "
echo "You can create new resource groups with the CLI using: az group create "
echo "Enter a resource group name"
read resourceGroupName
[[ "${resourceGroupName:?}" ]]
fi
if [[ -z "$deploymentName" ]]; then
echo "Enter a name for this deployment:"
read deploymentName
fi
if [[ -z "$resourceGroupLocation" ]]; then
echo "If creating a *new* resource group, you need to set a location "
echo "You can lookup locations with the CLI using: az account list-locations "
echo "Enter resource group location:"
read resourceGroupLocation
fi
#templateFile Path - template file to be used
templateFilePath="template.json"
if [ ! -f "$templateFilePath" ]; then
echo "$templateFilePath not found"
exit 1
fi
#parameter file path
parametersFilePath="parameters.json"
if [ ! -f "$parametersFilePath" ]; then
echo "$parametersFilePath not found"
exit 1
fi
if [ -z "$subscriptionId" ] || [ -z "$resourceGroupName" ] || [ -z "$deploymentName" ]; then
echo "Either one of subscriptionId, resourceGroupName, deploymentName is empty"
usage
fi
#login to azure using your credentials
# note: with `set -e` active, a `$?` check after a failing command is never
# reached, so test the command directly
if ! az account show 1> /dev/null 2>&1; then
az login
fi
#set the default subscription id
az account set --subscription $subscriptionId
set +e
#Check for existing RG
az group show --name $resourceGroupName 1> /dev/null
if [ $? != 0 ]; then
echo "Resource group with name" $resourceGroupName "could not be found. Creating new resource group.."
set -e
(
set -x
az group create --name $resourceGroupName --location $resourceGroupLocation 1> /dev/null
)
else
echo "Using existing resource group..."
fi
#Start deployment
echo "Starting deployment..."
(
set -x
az group deployment create --name "$deploymentName" --resource-group "$resourceGroupName" --template-file "$templateFilePath" --parameters "@${parametersFilePath}"
)
if [ $? == 0 ];
then
echo "Template has been successfully deployed"
fi

Просмотреть файл

@ -1,71 +0,0 @@
require 'azure_mgmt_resources'
class Deployer
# Initialize the deployer class with subscription, resource group and resource group location. The class will raise an
# ArgumentError if there are empty values for Tenant Id, Client Id or Client Secret environment variables.
#
# @param [String] subscription_id the subscription to deploy the template
# @param [String] resource_group the resource group to create or update and then deploy the template
# @param [String] resource_group_location the location of the resource group
def initialize(subscription_id, resource_group, resource_group_location)
raise ArgumentError.new("Missing template file 'template.json' in current directory.") unless File.exist?('template.json')
raise ArgumentError.new("Missing parameters file 'parameters.json' in current directory.") unless File.exist?('parameters.json')
@resource_group = resource_group
@subscription_id = subscription_id
@resource_group_location = resource_group_location
provider = MsRestAzure::ApplicationTokenProvider.new(
ENV['AZURE_TENANT_ID'],
ENV['AZURE_CLIENT_ID'],
ENV['AZURE_CLIENT_SECRET'])
credentials = MsRest::TokenCredentials.new(provider)
@client = Azure::ARM::Resources::ResourceManagementClient.new(credentials)
@client.subscription_id = @subscription_id
end
# Deploy the template to a resource group
def deploy
# ensure the resource group is created
params = Azure::ARM::Resources::Models::ResourceGroup.new.tap do |rg|
rg.location = @resource_group_location
end
@client.resource_groups.create_or_update(@resource_group, params).value!
# build the deployment from a json file template from parameters
template = File.read(File.expand_path(File.join(__dir__, 'template.json')))
deployment = Azure::ARM::Resources::Models::Deployment.new
deployment.properties = Azure::ARM::Resources::Models::DeploymentProperties.new
deployment.properties.template = JSON.parse(template)
deployment.properties.mode = Azure::ARM::Resources::Models::DeploymentMode::Incremental
# build the deployment template parameters from Hash to {key: {value: value}} format
deploy_params = File.read(File.expand_path(File.join(__dir__, 'parameters.json')))
deployment.properties.parameters = JSON.parse(deploy_params)["parameters"]
# put the deployment to the resource group
@client.deployments.create_or_update(@resource_group, 'azure-sample', deployment)
end
end
# Get user inputs and execute the script
if(ARGV.empty?)
puts "Please specify subscriptionId resourceGroupName resourceGroupLocation as command line arguments"
exit
end
subscription_id = ARGV[0] # Azure Subscription Id
resource_group = ARGV[1] # The resource group for deployment
resource_group_location = ARGV[2] # The resource group location
msg = "\nInitializing the Deployer class with subscription id: #{subscription_id}, resource group: #{resource_group}"
msg += "\nand resource group location: #{resource_group_location}...\n\n"
puts msg
# Initialize the deployer class
deployer = Deployer.new(subscription_id, resource_group, resource_group_location)
puts "Beginning the deployment... \n\n"
# Deploy the template
deployment = deployer.deploy
puts "Done deploying!!"

Просмотреть файл

@ -1,18 +0,0 @@
{
"$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentParameters.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"type": {
"value": "other"
},
"name": {
"value": "appinsights"
},
"regionId": {
"value": "westeurope"
},
"requestSource": {
"value": "IbizaAIExtension"
}
}
}

Просмотреть файл

@ -1,38 +0,0 @@
{
"$schema": "http://schema.management.azure.com/schemas/2014-04-01-preview/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"name": {
"type": "string"
},
"type": {
"type": "string"
},
"regionId": {
"type": "string"
},
"requestSource": {
"type": "string"
}
},
"resources": [
{
"name": "[parameters('name')]",
"type": "microsoft.insights/components",
"location": "[parameters('regionId')]",
"apiVersion": "2014-08-01",
"properties": {
"ApplicationId": "[parameters('name')]",
"Application_Type": "[parameters('type')]",
"Flow_Type": "Redfield",
"Request_Source": "[parameters('requestSource')]"
}
}
],
"outputs": {
"resourceId": {
"value": "[resourceId('microsoft.insights/components', parameters('name'))]",
"type": "string"
}
}
}

Просмотреть файл

@ -1,115 +0,0 @@
// Requires the following Azure NuGet packages and related dependencies:
// package id="Microsoft.Azure.Management.Authorization" version="2.0.0"
// package id="Microsoft.Azure.Management.ResourceManager" version="1.4.0-preview"
// package id="Microsoft.Rest.ClientRuntime.Azure.Authentication" version="2.2.8-preview"
using Microsoft.Azure.Management.ResourceManager;
using Microsoft.Azure.Management.ResourceManager.Models;
using Microsoft.Rest.Azure.Authentication;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using System;
using System.IO;
namespace PortalGenerated
{
/// <summary>
/// This is a helper class for deploying an Azure Resource Manager template
/// More info about template deployments can be found here https://go.microsoft.com/fwLink/?LinkID=733371
/// </summary>
class DeploymentHelper
{
string subscriptionId = "your-subscription-id";
string clientId = "your-service-principal-clientId";
string clientSecret = "your-service-principal-client-secret";
string resourceGroupName = "resource-group-name";
string deploymentName = "deployment-name";
string resourceGroupLocation = "resource-group-location"; // must be specified for creating a new resource group
string pathToTemplateFile = "path-to-template.json-on-disk";
string pathToParameterFile = "path-to-parameters.json-on-disk";
string tenantId = "tenant-id";
public async void Run()
{
// Try to obtain the service credentials
var serviceCreds = await ApplicationTokenProvider.LoginSilentAsync(tenantId, clientId, clientSecret);
// Read the template and parameter file contents
JObject templateFileContents = GetJsonFileContents(pathToTemplateFile);
JObject parameterFileContents = GetJsonFileContents(pathToParameterFile);
// Create the resource manager client
var resourceManagementClient = new ResourceManagementClient(serviceCreds);
resourceManagementClient.SubscriptionId = subscriptionId;
// Create or check that resource group exists
EnsureResourceGroupExists(resourceManagementClient, resourceGroupName, resourceGroupLocation);
// Start a deployment
DeployTemplate(resourceManagementClient, resourceGroupName, deploymentName, templateFileContents, parameterFileContents);
}
/// <summary>
/// Reads a JSON file from the specified path
/// </summary>
/// <param name="pathToJson">The full path to the JSON file</param>
/// <returns>The JSON file contents</returns>
private JObject GetJsonFileContents(string pathToJson)
{
JObject templatefileContent = new JObject();
using (StreamReader file = File.OpenText(pathToJson))
{
using (JsonTextReader reader = new JsonTextReader(file))
{
templatefileContent = (JObject)JToken.ReadFrom(reader);
return templatefileContent;
}
}
}
/// <summary>
/// Ensures that a resource group with the specified name exists. If it does not, will attempt to create one.
/// </summary>
/// <param name="resourceManagementClient">The resource manager client.</param>
/// <param name="resourceGroupName">The name of the resource group.</param>
/// <param name="resourceGroupLocation">The resource group location. Required when creating a new resource group.</param>
private static void EnsureResourceGroupExists(ResourceManagementClient resourceManagementClient, string resourceGroupName, string resourceGroupLocation)
{
if (resourceManagementClient.ResourceGroups.CheckExistence(resourceGroupName) != true)
{
Console.WriteLine(string.Format("Creating resource group '{0}' in location '{1}'", resourceGroupName, resourceGroupLocation));
var resourceGroup = new ResourceGroup();
resourceGroup.Location = resourceGroupLocation;
resourceManagementClient.ResourceGroups.CreateOrUpdate(resourceGroupName, resourceGroup);
}
else
{
Console.WriteLine(string.Format("Using existing resource group '{0}'", resourceGroupName));
}
}
/// <summary>
/// Starts a template deployment.
/// </summary>
/// <param name="resourceManagementClient">The resource manager client.</param>
/// <param name="resourceGroupName">The name of the resource group.</param>
/// <param name="deploymentName">The name of the deployment.</param>
/// <param name="templateFileContents">The template file contents.</param>
/// <param name="parameterFileContents">The parameter file contents.</param>
private static void DeployTemplate(ResourceManagementClient resourceManagementClient, string resourceGroupName, string deploymentName, JObject templateFileContents, JObject parameterFileContents)
{
Console.WriteLine(string.Format("Starting template deployment '{0}' in resource group '{1}'", deploymentName, resourceGroupName));
var deployment = new Deployment();
deployment.Properties = new DeploymentProperties
{
Mode = DeploymentMode.Incremental,
Template = templateFileContents,
Parameters = parameterFileContents["parameters"].ToObject<JObject>()
};
var deploymentResult = resourceManagementClient.Deployments.CreateOrUpdate(resourceGroupName, deploymentName, deployment);
Console.WriteLine(string.Format("Deployment status: {0}", deploymentResult.Properties.ProvisioningState));
}
}
}

Просмотреть файл

@ -1,107 +0,0 @@
<#
.SYNOPSIS
Deploys a template to Azure
.DESCRIPTION
Deploys an Azure Resource Manager template
.PARAMETER subscriptionId
The subscription id where the template will be deployed.
.PARAMETER resourceGroupName
The resource group where the template will be deployed. Can be the name of an existing or a new resource group.
.PARAMETER resourceGroupLocation
Optional, a resource group location. If specified, will try to create a new resource group in this location. If not specified, assumes resource group is existing.
.PARAMETER deploymentName
The deployment name.
.PARAMETER templateFilePath
Optional, path to the template file. Defaults to template.json.
.PARAMETER parametersFilePath
Optional, path to the parameters file. Defaults to parameters.json. If file is not found, will prompt for parameter values based on template.
#>
param(
[Parameter(Mandatory=$True)]
[string]
$subscriptionId,
[Parameter(Mandatory=$True)]
[string]
$resourceGroupName,
[string]
$resourceGroupLocation,
[Parameter(Mandatory=$True)]
[string]
$deploymentName,
[string]
$templateFilePath = "template.json",
[string]
$parametersFilePath = "parameters.json"
)
<#
.SYNOPSIS
Registers RPs
#>
Function RegisterRP {
Param(
[string]$ResourceProviderNamespace
)
Write-Host "Registering resource provider '$ResourceProviderNamespace'";
Register-AzureRmResourceProvider -ProviderNamespace $ResourceProviderNamespace;
}
#******************************************************************************
# Script body
# Execution begins here
#******************************************************************************
$ErrorActionPreference = "Stop"
# sign in
Write-Host "Logging in...";
Login-AzureRmAccount;
# select subscription
Write-Host "Selecting subscription '$subscriptionId'";
Select-AzureRmSubscription -SubscriptionID $subscriptionId;
# Register RPs
$resourceProviders = @("microsoft.containerregistry");
if($resourceProviders.length) {
Write-Host "Registering resource providers"
foreach($resourceProvider in $resourceProviders) {
RegisterRP($resourceProvider);
}
}
#Create or check for existing resource group
$resourceGroup = Get-AzureRmResourceGroup -Name $resourceGroupName -ErrorAction SilentlyContinue
if(!$resourceGroup)
{
Write-Host "Resource group '$resourceGroupName' does not exist. To create a new resource group, please enter a location.";
if(!$resourceGroupLocation) {
$resourceGroupLocation = Read-Host "resourceGroupLocation";
}
Write-Host "Creating resource group '$resourceGroupName' in location '$resourceGroupLocation'";
New-AzureRmResourceGroup -Name $resourceGroupName -Location $resourceGroupLocation
}
else{
Write-Host "Using existing resource group '$resourceGroupName'";
}
# Start the deployment
Write-Host "Starting deployment...";
if(Test-Path $parametersFilePath) {
New-AzureRmResourceGroupDeployment -ResourceGroupName $resourceGroupName -Name $deploymentName -TemplateFile $templateFilePath -TemplateParameterFile $parametersFilePath;
} else {
New-AzureRmResourceGroupDeployment -ResourceGroupName $resourceGroupName -Name $deploymentName -TemplateFile $templateFilePath;
}

Просмотреть файл

@ -1,122 +0,0 @@
#!/bin/bash
set -euo pipefail
IFS=$'\n\t'
# -e: immediately exit if any command has a non-zero exit status
# -o: prevents errors in a pipeline from being masked
# IFS new value is less likely to cause confusing bugs when looping arrays or arguments (e.g. $@)
usage() { echo "Usage: $0 -i <subscriptionId> -g <resourceGroupName> -n <deploymentName> -l <resourceGroupLocation>" 1>&2; exit 1; }
declare subscriptionId=""
declare resourceGroupName=""
declare deploymentName=""
declare resourceGroupLocation=""
# Initialize parameters specified from command line
while getopts ":i:g:n:l:" arg; do
case "${arg}" in
i)
subscriptionId=${OPTARG}
;;
g)
resourceGroupName=${OPTARG}
;;
n)
deploymentName=${OPTARG}
;;
l)
resourceGroupLocation=${OPTARG}
;;
esac
done
shift $((OPTIND-1))
#Prompt for parameters if some required parameters are missing
if [[ -z "$subscriptionId" ]]; then
echo "Your subscription ID can be looked up with the CLI using: az account show --out json "
echo "Enter your subscription ID:"
read subscriptionId
[[ "${subscriptionId:?}" ]]
fi
if [[ -z "$resourceGroupName" ]]; then
echo "This script will look for an existing resource group, otherwise a new one will be created "
echo "You can create new resource groups with the CLI using: az group create "
echo "Enter a resource group name"
read resourceGroupName
[[ "${resourceGroupName:?}" ]]
fi
if [[ -z "$deploymentName" ]]; then
echo "Enter a name for this deployment:"
read deploymentName
fi
if [[ -z "$resourceGroupLocation" ]]; then
echo "If creating a *new* resource group, you need to set a location "
echo "You can lookup locations with the CLI using: az account list-locations "
echo "Enter resource group location:"
read resourceGroupLocation
fi
#templateFile Path - template file to be used
templateFilePath="template.json"
if [ ! -f "$templateFilePath" ]; then
echo "$templateFilePath not found"
exit 1
fi
#parameter file path
parametersFilePath="parameters.json"
if [ ! -f "$parametersFilePath" ]; then
echo "$parametersFilePath not found"
exit 1
fi
if [ -z "$subscriptionId" ] || [ -z "$resourceGroupName" ] || [ -z "$deploymentName" ]; then
echo "Either one of subscriptionId, resourceGroupName, deploymentName is empty"
usage
fi
#login to azure using your credentials
# note: with `set -e` active, a `$?` check after a failing command is never
# reached, so test the command directly
if ! az account show 1> /dev/null 2>&1; then
az login
fi
#set the default subscription id
az account set --subscription $subscriptionId
set +e
#Check for existing RG
az group show --name $resourceGroupName 1> /dev/null
if [ $? != 0 ]; then
echo "Resource group with name" $resourceGroupName "could not be found. Creating new resource group.."
set -e
(
set -x
az group create --name $resourceGroupName --location $resourceGroupLocation 1> /dev/null
)
else
echo "Using existing resource group..."
fi
#Start deployment
echo "Starting deployment..."
(
set -x
az group deployment create --name "$deploymentName" --resource-group "$resourceGroupName" --template-file "$templateFilePath" --parameters "@${parametersFilePath}"
)
if [ $? == 0 ];
then
echo "Template has been successfully deployed"
fi

Просмотреть файл

@ -1,71 +0,0 @@
require 'azure_mgmt_resources'
class Deployer
# Initialize the deployer class with subscription, resource group and resource group location. The class will raise an
# ArgumentError if there are empty values for Tenant Id, Client Id or Client Secret environment variables.
#
# @param [String] subscription_id the subscription to deploy the template
# @param [String] resource_group the resource group to create or update and then deploy the template
# @param [String] resource_group_location the location of the resource group
def initialize(subscription_id, resource_group, resource_group_location)
raise ArgumentError.new("Missing template file 'template.json' in current directory.") unless File.exist?('template.json')
raise ArgumentError.new("Missing parameters file 'parameters.json' in current directory.") unless File.exist?('parameters.json')
@resource_group = resource_group
@subscription_id = subscription_id
@resource_group_location = resource_group_location
provider = MsRestAzure::ApplicationTokenProvider.new(
ENV['AZURE_TENANT_ID'],
ENV['AZURE_CLIENT_ID'],
ENV['AZURE_CLIENT_SECRET'])
credentials = MsRest::TokenCredentials.new(provider)
@client = Azure::ARM::Resources::ResourceManagementClient.new(credentials)
@client.subscription_id = @subscription_id
end
# Deploy the template to a resource group
def deploy
# ensure the resource group is created
params = Azure::ARM::Resources::Models::ResourceGroup.new.tap do |rg|
rg.location = @resource_group_location
end
@client.resource_groups.create_or_update(@resource_group, params).value!
# build the deployment from a json file template from parameters
template = File.read(File.expand_path(File.join(__dir__, 'template.json')))
deployment = Azure::ARM::Resources::Models::Deployment.new
deployment.properties = Azure::ARM::Resources::Models::DeploymentProperties.new
deployment.properties.template = JSON.parse(template)
deployment.properties.mode = Azure::ARM::Resources::Models::DeploymentMode::Incremental
# build the deployment template parameters from Hash to {key: {value: value}} format
deploy_params = File.read(File.expand_path(File.join(__dir__, 'parameters.json')))
deployment.properties.parameters = JSON.parse(deploy_params)["parameters"]
# put the deployment to the resource group
@client.deployments.create_or_update(@resource_group, 'azure-sample', deployment)
end
end
# Get user inputs and execute the script
if(ARGV.empty?)
puts "Please specify subscriptionId resourceGroupName resourceGroupLocation as command line arguments"
exit
end
subscription_id = ARGV[0] # Azure Subscription Id
resource_group = ARGV[1] # The resource group for deployment
resource_group_location = ARGV[2] # The resource group location
msg = "\nInitializing the Deployer class with subscription id: #{subscription_id}, resource group: #{resource_group}"
msg += "\nand resource group location: #{resource_group_location}...\n\n"
puts msg
# Initialize the deployer class
deployer = Deployer.new(subscription_id, resource_group, resource_group_location)
puts "Beginning the deployment... \n\n"
# Deploy the template
deployment = deployer.deploy
puts "Done deploying!!"

Просмотреть файл

@ -1,12 +0,0 @@
{
"$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentParameters.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"name": {
"value": "containerregistry"
},
"location": {
"value": "westeurope"
}
}
}

Просмотреть файл

@ -1,37 +0,0 @@
{
"$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"name": {
"type": "string"
},
"location": {
"type": "string"
}
},
"variables": {},
"resources": [
{
"type": "Microsoft.ContainerRegistry/registries",
"sku": {
"name": "Standard",
"tier": "Standard"
},
"name": "[parameters('name')]",
"apiVersion": "2017-10-01",
"location": "[parameters('location')]",
"tags": {},
"scale": null,
"properties": {
"adminUserEnabled": true
},
"dependsOn": []
}
],
"outputs": {
"resourceId": {
"value": "[resourceId('Microsoft.ContainerRegistry/registries', parameters('name'))]",
"type": "string"
}
}
}

Просмотреть файл

@ -1,115 +0,0 @@
// Requires the following Azure NuGet packages and related dependencies:
// package id="Microsoft.Azure.Management.Authorization" version="2.0.0"
// package id="Microsoft.Azure.Management.ResourceManager" version="1.4.0-preview"
// package id="Microsoft.Rest.ClientRuntime.Azure.Authentication" version="2.2.8-preview"
using Microsoft.Azure.Management.ResourceManager;
using Microsoft.Azure.Management.ResourceManager.Models;
using Microsoft.Rest.Azure.Authentication;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using System;
using System.IO;
namespace PortalGenerated
{
/// <summary>
/// This is a helper class for deploying an Azure Resource Manager template
/// More info about template deployments can be found here https://go.microsoft.com/fwLink/?LinkID=733371
/// </summary>
class DeploymentHelper
{
string subscriptionId = "your-subscription-id";
string clientId = "your-service-principal-clientId";
string clientSecret = "your-service-principal-client-secret";
string resourceGroupName = "resource-group-name";
string deploymentName = "deployment-name";
string resourceGroupLocation = "resource-group-location"; // must be specified for creating a new resource group
string pathToTemplateFile = "path-to-template.json-on-disk";
string pathToParameterFile = "path-to-parameters.json-on-disk";
string tenantId = "tenant-id";
public async void Run()
{
// Try to obtain the service credentials
var serviceCreds = await ApplicationTokenProvider.LoginSilentAsync(tenantId, clientId, clientSecret);
// Read the template and parameter file contents
JObject templateFileContents = GetJsonFileContents(pathToTemplateFile);
JObject parameterFileContents = GetJsonFileContents(pathToParameterFile);
// Create the resource manager client
var resourceManagementClient = new ResourceManagementClient(serviceCreds);
resourceManagementClient.SubscriptionId = subscriptionId;
// Create or check that resource group exists
EnsureResourceGroupExists(resourceManagementClient, resourceGroupName, resourceGroupLocation);
// Start a deployment
DeployTemplate(resourceManagementClient, resourceGroupName, deploymentName, templateFileContents, parameterFileContents);
}
/// <summary>
/// Reads a JSON file from the specified path
/// </summary>
/// <param name="pathToJson">The full path to the JSON file</param>
/// <returns>The JSON file contents</returns>
private JObject GetJsonFileContents(string pathToJson)
{
JObject templatefileContent = new JObject();
using (StreamReader file = File.OpenText(pathToJson))
{
using (JsonTextReader reader = new JsonTextReader(file))
{
templatefileContent = (JObject)JToken.ReadFrom(reader);
return templatefileContent;
}
}
}
/// <summary>
/// Ensures that a resource group with the specified name exists. If it does not, will attempt to create one.
/// </summary>
/// <param name="resourceManagementClient">The resource manager client.</param>
/// <param name="resourceGroupName">The name of the resource group.</param>
/// <param name="resourceGroupLocation">The resource group location. Required when creating a new resource group.</param>
private static void EnsureResourceGroupExists(ResourceManagementClient resourceManagementClient, string resourceGroupName, string resourceGroupLocation)
{
if (resourceManagementClient.ResourceGroups.CheckExistence(resourceGroupName) != true)
{
Console.WriteLine(string.Format("Creating resource group '{0}' in location '{1}'", resourceGroupName, resourceGroupLocation));
var resourceGroup = new ResourceGroup();
resourceGroup.Location = resourceGroupLocation;
resourceManagementClient.ResourceGroups.CreateOrUpdate(resourceGroupName, resourceGroup);
}
else
{
Console.WriteLine(string.Format("Using existing resource group '{0}'", resourceGroupName));
}
}
/// <summary>
/// Starts a template deployment.
/// </summary>
/// <param name="resourceManagementClient">The resource manager client.</param>
/// <param name="resourceGroupName">The name of the resource group.</param>
/// <param name="deploymentName">The name of the deployment.</param>
/// <param name="templateFileContents">The template file contents.</param>
/// <param name="parameterFileContents">The parameter file contents.</param>
private static void DeployTemplate(ResourceManagementClient resourceManagementClient, string resourceGroupName, string deploymentName, JObject templateFileContents, JObject parameterFileContents)
{
Console.WriteLine(string.Format("Starting template deployment '{0}' in resource group '{1}'", deploymentName, resourceGroupName));
var deployment = new Deployment();
deployment.Properties = new DeploymentProperties
{
Mode = DeploymentMode.Incremental,
Template = templateFileContents,
Parameters = parameterFileContents["parameters"].ToObject<JObject>()
};
var deploymentResult = resourceManagementClient.Deployments.CreateOrUpdate(resourceGroupName, deploymentName, deployment);
Console.WriteLine(string.Format("Deployment status: {0}", deploymentResult.Properties.ProvisioningState));
}
}
}

Просмотреть файл

@ -1,107 +0,0 @@
<#
.SYNOPSIS
Deploys a template to Azure
.DESCRIPTION
Deploys an Azure Resource Manager template
.PARAMETER subscriptionId
The subscription id where the template will be deployed.
.PARAMETER resourceGroupName
The resource group where the template will be deployed. Can be the name of an existing or a new resource group.
.PARAMETER resourceGroupLocation
Optional, a resource group location. If specified, will try to create a new resource group in this location. If not specified, assumes resource group is existing.
.PARAMETER deploymentName
The deployment name.
.PARAMETER templateFilePath
Optional, path to the template file. Defaults to template.json.
.PARAMETER parametersFilePath
Optional, path to the parameters file. Defaults to parameters.json. If file is not found, will prompt for parameter values based on template.
#>
param(
[Parameter(Mandatory=$True)]
[string]
$subscriptionId,
[Parameter(Mandatory=$True)]
[string]
$resourceGroupName,
[string]
$resourceGroupLocation,
[Parameter(Mandatory=$True)]
[string]
$deploymentName,
[string]
$templateFilePath = "template.json",
[string]
$parametersFilePath = "parameters.json"
)
<#
.SYNOPSIS
Registers RPs
#>
Function RegisterRP {
Param(
[string]$ResourceProviderNamespace
)
Write-Host "Registering resource provider '$ResourceProviderNamespace'";
Register-AzureRmResourceProvider -ProviderNamespace $ResourceProviderNamespace;
}
#******************************************************************************
# Script body
# Execution begins here
#******************************************************************************
$ErrorActionPreference = "Stop"
# sign in
Write-Host "Logging in...";
Login-AzureRmAccount;
# select subscription
Write-Host "Selecting subscription '$subscriptionId'";
Select-AzureRmSubscription -SubscriptionID $subscriptionId;
# Register RPs
$resourceProviders = @("microsoft.databricks");
if($resourceProviders.length) {
Write-Host "Registering resource providers"
foreach($resourceProvider in $resourceProviders) {
RegisterRP($resourceProvider);
}
}
#Create or check for existing resource group
$resourceGroup = Get-AzureRmResourceGroup -Name $resourceGroupName -ErrorAction SilentlyContinue
if(!$resourceGroup)
{
Write-Host "Resource group '$resourceGroupName' does not exist. To create a new resource group, please enter a location.";
if(!$resourceGroupLocation) {
$resourceGroupLocation = Read-Host "resourceGroupLocation";
}
Write-Host "Creating resource group '$resourceGroupName' in location '$resourceGroupLocation'";
New-AzureRmResourceGroup -Name $resourceGroupName -Location $resourceGroupLocation
}
else{
Write-Host "Using existing resource group '$resourceGroupName'";
}
# Start the deployment
Write-Host "Starting deployment...";
if(Test-Path $parametersFilePath) {
New-AzureRmResourceGroupDeployment -ResourceGroupName $resourceGroupName -Name $deploymentName -TemplateFile $templateFilePath -TemplateParameterFile $parametersFilePath;
} else {
New-AzureRmResourceGroupDeployment -ResourceGroupName $resourceGroupName -Name $deploymentName -TemplateFile $templateFilePath;
}

Просмотреть файл

@ -1,122 +0,0 @@
#!/bin/bash
set -euo pipefail
IFS=$'\n\t'
# -e: immediately exit if any command has a non-zero exit status
# -o: prevents errors in a pipeline from being masked
# IFS new value is less likely to cause confusing bugs when looping arrays or arguments (e.g. $@)
usage() { echo "Usage: $0 -i <subscriptionId> -g <resourceGroupName> -n <deploymentName> -l <resourceGroupLocation>" 1>&2; exit 1; }
declare subscriptionId=""
declare resourceGroupName=""
declare deploymentName=""
declare resourceGroupLocation=""
# Initialize parameters specified from command line
while getopts ":i:g:n:l:" arg; do
case "${arg}" in
i)
subscriptionId=${OPTARG}
;;
g)
resourceGroupName=${OPTARG}
;;
n)
deploymentName=${OPTARG}
;;
l)
resourceGroupLocation=${OPTARG}
;;
esac
done
shift $((OPTIND-1))
#Prompt for parameters if some required parameters are missing
if [[ -z "$subscriptionId" ]]; then
echo "Your subscription ID can be looked up with the CLI using: az account show --out json "
echo "Enter your subscription ID:"
read subscriptionId
[[ "${subscriptionId:?}" ]]
fi
if [[ -z "$resourceGroupName" ]]; then
echo "This script will look for an existing resource group, otherwise a new one will be created "
echo "You can create new resource groups with the CLI using: az group create "
echo "Enter a resource group name"
read resourceGroupName
[[ "${resourceGroupName:?}" ]]
fi
if [[ -z "$deploymentName" ]]; then
echo "Enter a name for this deployment:"
read deploymentName
fi
if [[ -z "$resourceGroupLocation" ]]; then
echo "If creating a *new* resource group, you need to set a location "
echo "You can lookup locations with the CLI using: az account list-locations "
echo "Enter resource group location:"
read resourceGroupLocation
fi
#templateFile Path - template file to be used
templateFilePath="template.json"
if [ ! -f "$templateFilePath" ]; then
echo "$templateFilePath not found"
exit 1
fi
#parameter file path
parametersFilePath="parameters.json"
if [ ! -f "$parametersFilePath" ]; then
echo "$parametersFilePath not found"
exit 1
fi
if [ -z "$subscriptionId" ] || [ -z "$resourceGroupName" ] || [ -z "$deploymentName" ]; then
echo "Either one of subscriptionId, resourceGroupName, deploymentName is empty"
usage
fi
#login to azure using your credentials
# note: with `set -e` active, a `$?` check after a failing command is never
# reached, so test the command directly
if ! az account show 1> /dev/null 2>&1; then
az login
fi
#set the default subscription id
az account set --subscription $subscriptionId
set +e
#Check for existing RG
az group show --name $resourceGroupName 1> /dev/null
if [ $? != 0 ]; then
echo "Resource group with name" $resourceGroupName "could not be found. Creating new resource group.."
set -e
(
set -x
az group create --name $resourceGroupName --location $resourceGroupLocation 1> /dev/null
)
else
echo "Using existing resource group..."
fi
#Start deployment
echo "Starting deployment..."
(
set -x
az group deployment create --name "$deploymentName" --resource-group "$resourceGroupName" --template-file "$templateFilePath" --parameters "@${parametersFilePath}"
)
if [ $? == 0 ];
then
echo "Template has been successfully deployed"
fi

Просмотреть файл

@ -1,71 +0,0 @@
require 'azure_mgmt_resources'
class Deployer
# Initialize the deployer class with subscription, resource group and resource group location. The class will raise an
# ArgumentError if there are empty values for Tenant Id, Client Id or Client Secret environment variables.
#
# @param [String] subscription_id the subscription to deploy the template
# @param [String] resource_group the resource group to create or update and then deploy the template
# @param [String] resource_group_location the location of the resource group
def initialize(subscription_id, resource_group, resource_group_location)
raise ArgumentError.new("Missing template file 'template.json' in current directory.") unless File.exist?('template.json')
raise ArgumentError.new("Missing parameters file 'parameters.json' in current directory.") unless File.exist?('parameters.json')
@resource_group = resource_group
@subscription_id = subscription_id
@resource_group_location = resource_group_location
provider = MsRestAzure::ApplicationTokenProvider.new(
ENV['AZURE_TENANT_ID'],
ENV['AZURE_CLIENT_ID'],
ENV['AZURE_CLIENT_SECRET'])
credentials = MsRest::TokenCredentials.new(provider)
@client = Azure::ARM::Resources::ResourceManagementClient.new(credentials)
@client.subscription_id = @subscription_id
end
# Deploy the template to a resource group
def deploy
# ensure the resource group is created
params = Azure::ARM::Resources::Models::ResourceGroup.new.tap do |rg|
rg.location = @resource_group_location
end
@client.resource_groups.create_or_update(@resource_group, params).value!
# build the deployment from a json file template from parameters
template = File.read(File.expand_path(File.join(__dir__, 'template.json')))
deployment = Azure::ARM::Resources::Models::Deployment.new
deployment.properties = Azure::ARM::Resources::Models::DeploymentProperties.new
deployment.properties.template = JSON.parse(template)
deployment.properties.mode = Azure::ARM::Resources::Models::DeploymentMode::Incremental
# build the deployment template parameters from Hash to {key: {value: value}} format
deploy_params = File.read(File.expand_path(File.join(__dir__, 'parameters.json')))
deployment.properties.parameters = JSON.parse(deploy_params)["parameters"]
# put the deployment to the resource group
@client.deployments.create_or_update(@resource_group, 'azure-sample', deployment)
end
end
# Get user inputs and execute the script
if(ARGV.empty?)
puts "Please specify subscriptionId resourceGroupName resourceGroupLocation as command line arguments"
exit
end
subscription_id = ARGV[0] # Azure Subscription Id
resource_group = ARGV[1] # The resource group for deployment
resource_group_location = ARGV[2] # The resource group location
msg = "\nInitializing the Deployer class with subscription id: #{subscription_id}, resource group: #{resource_group}"
msg += "\nand resource group location: #{resource_group_location}...\n\n"
puts msg
# Initialize the deployer class
deployer = Deployer.new(subscription_id, resource_group, resource_group_location)
puts "Beginning the deployment... \n\n"
# Deploy the template
deployment = deployer.deploy
puts "Done deploying!!"

Просмотреть файл

@ -1,15 +0,0 @@
{
"$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentParameters.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"workspaceName": {
"value": "databricks"
},
"location": {
"value": "westeurope"
},
"tier": {
"value": "standard"
}
}
}

Просмотреть файл

@ -1,41 +0,0 @@
{
"$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"location": {
"type": "string"
},
"workspaceName": {
"type": "string"
},
"tier": {
"defaultValue": "premium",
"type": "string"
}
},
"resources": [
{
"apiVersion": "2018-04-01",
"location": "[parameters('location')]",
"name": "[parameters('workspaceName')]",
"sku": {
"name": "[parameters('tier')]"
},
"comments": "Please do not use an existing resource group for ManagedResourceGroupId.",
"properties": {
"ManagedResourceGroupId": "[variables('managedResourceGroupId')]"
},
"type": "Microsoft.Databricks/workspaces"
}
],
"variables": {
"managedResourceGroupId": "[concat(subscription().id, '/resourceGroups/', variables('managedResourceGroupName'))]",
"managedResourceGroupName": "[concat('databricks-rg-', parameters('workspaceName'), '-', uniqueString(parameters('workspaceName'), resourceGroup().id))]"
},
"outputs": {
"resourceId": {
"value": "[resourceId('Microsoft.Databricks/workspaces', parameters('workspaceName'))]",
"type": "string"
}
}
}

Просмотреть файл

@ -1,115 +0,0 @@
// Requires the following Azure NuGet packages and related dependencies:
// package id="Microsoft.Azure.Management.Authorization" version="2.0.0"
// package id="Microsoft.Azure.Management.ResourceManager" version="1.4.0-preview"
// package id="Microsoft.Rest.ClientRuntime.Azure.Authentication" version="2.2.8-preview"
using Microsoft.Azure.Management.ResourceManager;
using Microsoft.Azure.Management.ResourceManager.Models;
using Microsoft.Rest.Azure.Authentication;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using System;
using System.IO;
namespace PortalGenerated
{
/// <summary>
/// This is a helper class for deploying an Azure Resource Manager template
/// More info about template deployments can be found here https://go.microsoft.com/fwLink/?LinkID=733371
/// </summary>
class DeploymentHelper
{
string subscriptionId = "your-subscription-id";
string clientId = "your-service-principal-clientId";
string clientSecret = "your-service-principal-client-secret";
string resourceGroupName = "resource-group-name";
string deploymentName = "deployment-name";
string resourceGroupLocation = "resource-group-location"; // must be specified for creating a new resource group
string pathToTemplateFile = "path-to-template.json-on-disk";
string pathToParameterFile = "path-to-parameters.json-on-disk";
string tenantId = "tenant-id";
public async void Run()
{
// Try to obtain the service credentials
var serviceCreds = await ApplicationTokenProvider.LoginSilentAsync(tenantId, clientId, clientSecret);
// Read the template and parameter file contents
JObject templateFileContents = GetJsonFileContents(pathToTemplateFile);
JObject parameterFileContents = GetJsonFileContents(pathToParameterFile);
// Create the resource manager client
var resourceManagementClient = new ResourceManagementClient(serviceCreds);
resourceManagementClient.SubscriptionId = subscriptionId;
// Create or check that resource group exists
EnsureResourceGroupExists(resourceManagementClient, resourceGroupName, resourceGroupLocation);
// Start a deployment
DeployTemplate(resourceManagementClient, resourceGroupName, deploymentName, templateFileContents, parameterFileContents);
}
/// <summary>
/// Reads a JSON file from the specified path
/// </summary>
/// <param name="pathToJson">The full path to the JSON file</param>
/// <returns>The JSON file contents</returns>
private JObject GetJsonFileContents(string pathToJson)
{
JObject templatefileContent = new JObject();
using (StreamReader file = File.OpenText(pathToJson))
{
using (JsonTextReader reader = new JsonTextReader(file))
{
templatefileContent = (JObject)JToken.ReadFrom(reader);
return templatefileContent;
}
}
}
/// <summary>
/// Ensures that a resource group with the specified name exists. If it does not, will attempt to create one.
/// </summary>
/// <param name="resourceManagementClient">The resource manager client.</param>
/// <param name="resourceGroupName">The name of the resource group.</param>
/// <param name="resourceGroupLocation">The resource group location. Required when creating a new resource group.</param>
private static void EnsureResourceGroupExists(ResourceManagementClient resourceManagementClient, string resourceGroupName, string resourceGroupLocation)
{
if (resourceManagementClient.ResourceGroups.CheckExistence(resourceGroupName) != true)
{
Console.WriteLine(string.Format("Creating resource group '{0}' in location '{1}'", resourceGroupName, resourceGroupLocation));
var resourceGroup = new ResourceGroup();
resourceGroup.Location = resourceGroupLocation;
resourceManagementClient.ResourceGroups.CreateOrUpdate(resourceGroupName, resourceGroup);
}
else
{
Console.WriteLine(string.Format("Using existing resource group '{0}'", resourceGroupName));
}
}
/// <summary>
/// Starts a template deployment.
/// </summary>
/// <param name="resourceManagementClient">The resource manager client.</param>
/// <param name="resourceGroupName">The name of the resource group.</param>
/// <param name="deploymentName">The name of the deployment.</param>
/// <param name="templateFileContents">The template file contents.</param>
/// <param name="parameterFileContents">The parameter file contents.</param>
private static void DeployTemplate(ResourceManagementClient resourceManagementClient, string resourceGroupName, string deploymentName, JObject templateFileContents, JObject parameterFileContents)
{
Console.WriteLine(string.Format("Starting template deployment '{0}' in resource group '{1}'", deploymentName, resourceGroupName));
var deployment = new Deployment();
deployment.Properties = new DeploymentProperties
{
Mode = DeploymentMode.Incremental,
Template = templateFileContents,
Parameters = parameterFileContents["parameters"].ToObject<JObject>()
};
var deploymentResult = resourceManagementClient.Deployments.CreateOrUpdate(resourceGroupName, deploymentName, deployment);
Console.WriteLine(string.Format("Deployment status: {0}", deploymentResult.Properties.ProvisioningState));
}
}
}

Просмотреть файл

@ -1,107 +0,0 @@
<#
.SYNOPSIS
Deploys a template to Azure
.DESCRIPTION
Deploys an Azure Resource Manager template
.PARAMETER subscriptionId
The subscription id where the template will be deployed.
.PARAMETER resourceGroupName
The resource group where the template will be deployed. Can be the name of an existing or a new resource group.
.PARAMETER resourceGroupLocation
Optional, a resource group location. If specified, will try to create a new resource group in this location. If not specified, assumes resource group is existing.
.PARAMETER deploymentName
The deployment name.
.PARAMETER templateFilePath
Optional, path to the template file. Defaults to template.json.
.PARAMETER parametersFilePath
Optional, path to the parameters file. Defaults to parameters.json. If file is not found, will prompt for parameter values based on template.
#>
param(
[Parameter(Mandatory=$True)]
[string]
$subscriptionId,
[Parameter(Mandatory=$True)]
[string]
$resourceGroupName,
[string]
$resourceGroupLocation,
[Parameter(Mandatory=$True)]
[string]
$deploymentName,
[string]
$templateFilePath = "template.json",
[string]
$parametersFilePath = "parameters.json"
)
<#
.SYNOPSIS
Registers RPs
#>
Function RegisterRP {
Param(
[string]$ResourceProviderNamespace
)
Write-Host "Registering resource provider '$ResourceProviderNamespace'";
Register-AzureRmResourceProvider -ProviderNamespace $ResourceProviderNamespace;
}
#******************************************************************************
# Script body
# Execution begins here
#******************************************************************************
$ErrorActionPreference = "Stop"
# sign in
Write-Host "Logging in...";
Login-AzureRmAccount;
# select subscription
Write-Host "Selecting subscription '$subscriptionId'";
Select-AzureRmSubscription -SubscriptionID $subscriptionId;
# Register RPs
$resourceProviders = @("microsoft.keyvault");
if($resourceProviders.length) {
Write-Host "Registering resource providers"
foreach($resourceProvider in $resourceProviders) {
RegisterRP($resourceProvider);
}
}
#Create or check for existing resource group
$resourceGroup = Get-AzureRmResourceGroup -Name $resourceGroupName -ErrorAction SilentlyContinue
if(!$resourceGroup)
{
Write-Host "Resource group '$resourceGroupName' does not exist. To create a new resource group, please enter a location.";
if(!$resourceGroupLocation) {
$resourceGroupLocation = Read-Host "resourceGroupLocation";
}
Write-Host "Creating resource group '$resourceGroupName' in location '$resourceGroupLocation'";
New-AzureRmResourceGroup -Name $resourceGroupName -Location $resourceGroupLocation
}
else{
Write-Host "Using existing resource group '$resourceGroupName'";
}
# Start the deployment
Write-Host "Starting deployment...";
if(Test-Path $parametersFilePath) {
New-AzureRmResourceGroupDeployment -ResourceGroupName $resourceGroupName -Name $deploymentName -TemplateFile $templateFilePath -TemplateParameterFile $parametersFilePath;
} else {
New-AzureRmResourceGroupDeployment -ResourceGroupName $resourceGroupName -Name $deploymentName -TemplateFile $templateFilePath;
}

Просмотреть файл

@ -1,122 +0,0 @@
#!/bin/bash
set -euo pipefail
IFS=$'\n\t'
# -e: immediately exit if any command has a non-zero exit status
# -o: prevents errors in a pipeline from being masked
# IFS new value is less likely to cause confusing bugs when looping arrays or arguments (e.g. $@)
usage() { echo "Usage: $0 -i <subscriptionId> -g <resourceGroupName> -n <deploymentName> -l <resourceGroupLocation>" 1>&2; exit 1; }
declare subscriptionId=""
declare resourceGroupName=""
declare deploymentName=""
declare resourceGroupLocation=""
# Initialize parameters specified from command line
while getopts ":i:g:n:l:" arg; do
case "${arg}" in
i)
subscriptionId=${OPTARG}
;;
g)
resourceGroupName=${OPTARG}
;;
n)
deploymentName=${OPTARG}
;;
l)
resourceGroupLocation=${OPTARG}
;;
esac
done
shift $((OPTIND-1))
#Prompt for parameters if some required parameters are missing
if [[ -z "$subscriptionId" ]]; then
echo "Your subscription ID can be looked up with the CLI using: az account show --out json "
echo "Enter your subscription ID:"
read subscriptionId
[[ "${subscriptionId:?}" ]]
fi
if [[ -z "$resourceGroupName" ]]; then
echo "This script will look for an existing resource group, otherwise a new one will be created "
echo "You can create new resource groups with the CLI using: az group create "
echo "Enter a resource group name"
read resourceGroupName
[[ "${resourceGroupName:?}" ]]
fi
if [[ -z "$deploymentName" ]]; then
echo "Enter a name for this deployment:"
read deploymentName
fi
if [[ -z "$resourceGroupLocation" ]]; then
echo "If creating a *new* resource group, you need to set a location "
echo "You can lookup locations with the CLI using: az account list-locations "
echo "Enter resource group location:"
read resourceGroupLocation
fi
#templateFile Path - template file to be used
templateFilePath="template.json"
if [ ! -f "$templateFilePath" ]; then
echo "$templateFilePath not found"
exit 1
fi
#parameter file path
parametersFilePath="parameters.json"
if [ ! -f "$parametersFilePath" ]; then
echo "$parametersFilePath not found"
exit 1
fi
if [ -z "$subscriptionId" ] || [ -z "$resourceGroupName" ] || [ -z "$deploymentName" ]; then
echo "Either one of subscriptionId, resourceGroupName, deploymentName is empty"
usage
fi
#login to azure using your credentials; the if-guard keeps "set -e" from
#aborting the script before az login can run when there is no active session
if ! az account show 1> /dev/null; then
	az login
fi
#set the default subscription id
az account set --subscription $subscriptionId
set +e
#Check for existing RG
az group show --name $resourceGroupName 1> /dev/null
if [ $? != 0 ]; then
echo "Resource group with name" $resourceGroupName "could not be found. Creating new resource group.."
set -e
(
set -x
az group create --name $resourceGroupName --location $resourceGroupLocation 1> /dev/null
)
else
echo "Using existing resource group..."
fi
#Start deployment
echo "Starting deployment..."
(
set -x
az group deployment create --name "$deploymentName" --resource-group "$resourceGroupName" --template-file "$templateFilePath" --parameters "@${parametersFilePath}"
)
if [ $? == 0 ];
then
echo "Template has been successfully deployed"
fi

Просмотреть файл

@ -1,71 +0,0 @@
require 'azure_mgmt_resources'
class Deployer
# Initialize the deployer class with subscription, resource group and resource group location. The class will raise an
# ArgumentError if 'template.json' or 'parameters.json' is missing from the current directory; the AZURE_TENANT_ID, AZURE_CLIENT_ID and AZURE_CLIENT_SECRET environment variables must be set for authentication.
#
# @param [String] subscription_id the subscription to deploy the template
# @param [String] resource_group the resource group to create or update and then deploy the template
# @param [String] resource_group_location the location of the resource group
def initialize(subscription_id, resource_group, resource_group_location)
raise ArgumentError.new("Missing template file 'template.json' in current directory.") unless File.exist?('template.json')
raise ArgumentError.new("Missing parameters file 'parameters.json' in current directory.") unless File.exist?('parameters.json')
@resource_group = resource_group
@subscription_id = subscription_id
@resource_group_location = resource_group_location
provider = MsRestAzure::ApplicationTokenProvider.new(
ENV['AZURE_TENANT_ID'],
ENV['AZURE_CLIENT_ID'],
ENV['AZURE_CLIENT_SECRET'])
credentials = MsRest::TokenCredentials.new(provider)
@client = Azure::ARM::Resources::ResourceManagementClient.new(credentials)
@client.subscription_id = @subscription_id
end
# Deploy the template to a resource group
def deploy
# ensure the resource group is created
params = Azure::ARM::Resources::Models::ResourceGroup.new.tap do |rg|
rg.location = @resource_group_location
end
@client.resource_groups.create_or_update(@resource_group, params).value!
# build the deployment from a json file template from parameters
template = File.read(File.expand_path(File.join(__dir__, 'template.json')))
deployment = Azure::ARM::Resources::Models::Deployment.new
deployment.properties = Azure::ARM::Resources::Models::DeploymentProperties.new
deployment.properties.template = JSON.parse(template)
deployment.properties.mode = Azure::ARM::Resources::Models::DeploymentMode::Incremental
# build the deployment template parameters from Hash to {key: {value: value}} format
deploy_params = File.read(File.expand_path(File.join(__dir__, 'parameters.json')))
deployment.properties.parameters = JSON.parse(deploy_params)["parameters"]
# put the deployment to the resource group
@client.deployments.create_or_update(@resource_group, 'azure-sample', deployment)
end
end
# Get user inputs and execute the script
if(ARGV.empty?)
puts "Please specify subscriptionId resourceGroupName resourceGroupLocation as command line arguments"
exit
end
subscription_id = ARGV[0] # Azure Subscription Id
resource_group = ARGV[1] # The resource group for deployment
resource_group_location = ARGV[2] # The resource group location
msg = "\nInitializing the Deployer class with subscription id: #{subscription_id}, resource group: #{resource_group}"
msg += "\nand resource group location: #{resource_group_location}...\n\n"
puts msg
# Initialize the deployer class
deployer = Deployer.new(subscription_id, resource_group, resource_group_location)
puts "Beginning the deployment... \n\n"
# Deploy the template
deployment = deployer.deploy
puts "Done deploying!!"

Просмотреть файл

@ -1,18 +0,0 @@
{
"$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentParameters.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"name": {
"value": "keyvault"
},
"location": {
"value": "westeurope"
},
"tenantId": {
"value": "tenantid"
},
"objectId": {
"value": "357e0b82-f4db-4e11-b279-654fd1b93629"
}
}
}

Просмотреть файл

@ -1,91 +0,0 @@
{
"$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"name": {
"type": "string"
},
"location": {
"type": "string"
} ,
"tenantId": {
"type": "string"
},
"objectId": {
"type": "string"
}
},
"variables": {},
"resources": [
{
"type": "Microsoft.KeyVault/vaults",
"name": "[parameters('name')]",
"apiVersion": "2016-10-01",
"location": "[parameters('location')]",
"tags": {},
"scale": null,
"properties": {
"sku": {
"family": "A",
"name": "Standard"
},
"tenantId": "72f988bf-86f1-41af-91ab-2d7cd011db47",
"accessPolicies": [
{
"tenantId": "[parameters('tenantId')]",
"objectId": "[parameters('objectId')]",
"permissions": {
"keys": [
"Get",
"List",
"Update",
"Create",
"Import",
"Delete",
"Recover",
"Backup",
"Restore"
],
"secrets": [
"Get",
"List",
"Set",
"Delete",
"Recover",
"Backup",
"Restore"
],
"certificates": [
"Get",
"List",
"Update",
"Create",
"Import",
"Delete",
"Recover",
"Backup",
"Restore",
"ManageContacts",
"ManageIssuers",
"GetIssuers",
"ListIssuers",
"SetIssuers",
"DeleteIssuers"
]
}
}
],
"enabledForDeployment": false,
"enabledForDiskEncryption": false,
"enabledForTemplateDeployment": false
},
"dependsOn": []
}
],
"outputs": {
"resourceId": {
"value": "[resourceId('Microsoft.KeyVault/vaults', parameters('name'))]",
"type": "string"
}
}
}
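
Once a vault like the one above exists, secrets are typically read at runtime rather than checked into source control. A minimal sketch with the azure-identity / azure-keyvault-secrets packages follows; the vault URL and secret name are placeholders, and the caller is assumed to hold an access policy on the vault.

# Minimal sketch: read a secret from the provisioned Key Vault.
# The vault URL and secret name are placeholders.
from azure.identity import DefaultAzureCredential
from azure.keyvault.secrets import SecretClient

credential = DefaultAzureCredential()
client = SecretClient(vault_url="https://<vault-name>.vault.azure.net", credential=credential)

secret = client.get_secret("storage-account-key")
print(secret.name)  # avoid printing secret.value in real pipelines
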

Просмотреть файл

@ -1,30 +0,0 @@
{
"$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentParameters.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"workspaceName": {
"value": "myworkspace"
},
"clusterName": {
"value": "mycluster"
},
"minNodeCount": {
"value": 0
},
"maxNodeCount": {
"value": 1
},
"adminUserName": {
"value": "<Fill-here>"
},
"adminUserPassword": {
"value": "<Fill-here>"
},
"workspacelocation": {
"value": "<Fill-here>"
},
"vmSize": {
"value": "Standard_D3_V2"
}
}
}

Просмотреть файл

@ -1,57 +0,0 @@
{
"$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"workspaceName": {
"type": "string"
},
"clusterName": {
"type": "string"
},
"minNodeCount": {
"type": "int"
},
"maxNodeCount": {
"type": "int"
},
"workspacelocation": {
"type": "string"
},
"adminUserName": {
"type": "string"
},
"adminUserPassword": {
"type": "string"
},
"vmSize": {
"type": "string"
}
},
"variables": {},
"resources": [
{
"type": "Microsoft.MachineLearningServices/workspaces/computes",
"name": "[concat(parameters('workspaceName'), '/', parameters('clusterName'))]",
"apiVersion": "2018-11-19",
"location" : "[parameters('workspacelocation')]",
"properties": {
"computeType": "AmlCompute",
"computeLocation" : "[parameters('workspacelocation')]",
"properties":
{
"vmSize" : "[parameters('vmSize')]",
"scaleSettings":
{
"minNodeCount" : "[parameters('minNodeCount')]",
"maxNodeCount" : "[parameters('maxNodeCount')]"
},
"userAccountCredentials" :
{
"adminUserName" : "[parameters('adminUserName')]",
"adminUserPassword" : "[parameters('adminUserPassword')]"
}
}
}
}
]
}
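
The same compute target can usually be provisioned from the Azure ML Python SDK instead of through ARM; the sketch below mirrors the parameters above (cluster name, VM size and node counts are assumptions):

# Minimal sketch: provision an AmlCompute cluster via the Python SDK.
from azureml.core import Workspace
from azureml.core.authentication import AzureCliAuthentication
from azureml.core.compute import AmlCompute, ComputeTarget

ws = Workspace.from_config(auth=AzureCliAuthentication())

compute_config = AmlCompute.provisioning_configuration(
    vm_size='Standard_D3_v2',
    min_nodes=0,
    max_nodes=1)

cluster = ComputeTarget.create(ws, 'mycluster', compute_config)
cluster.wait_for_completion(show_output=True)
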

Просмотреть файл

@ -1,115 +0,0 @@
// Requires the following Azure NuGet packages and related dependencies:
// package id="Microsoft.Azure.Management.Authorization" version="2.0.0"
// package id="Microsoft.Azure.Management.ResourceManager" version="1.4.0-preview"
// package id="Microsoft.Rest.ClientRuntime.Azure.Authentication" version="2.2.8-preview"
using Microsoft.Azure.Management.ResourceManager;
using Microsoft.Azure.Management.ResourceManager.Models;
using Microsoft.Rest.Azure.Authentication;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using System;
using System.IO;
namespace PortalGenerated
{
/// <summary>
/// This is a helper class for deploying an Azure Resource Manager template
/// More info about template deployments can be found here https://go.microsoft.com/fwLink/?LinkID=733371
/// </summary>
class DeploymentHelper
{
string subscriptionId = "your-subscription-id";
string clientId = "your-service-principal-clientId";
string clientSecret = "your-service-principal-client-secret";
string resourceGroupName = "resource-group-name";
string deploymentName = "deployment-name";
string resourceGroupLocation = "resource-group-location"; // must be specified for creating a new resource group
string pathToTemplateFile = "path-to-template.json-on-disk";
string pathToParameterFile = "path-to-parameters.json-on-disk";
string tenantId = "tenant-id";
public async void Run()
{
// Try to obtain the service credentials
var serviceCreds = await ApplicationTokenProvider.LoginSilentAsync(tenantId, clientId, clientSecret);
// Read the template and parameter file contents
JObject templateFileContents = GetJsonFileContents(pathToTemplateFile);
JObject parameterFileContents = GetJsonFileContents(pathToParameterFile);
// Create the resource manager client
var resourceManagementClient = new ResourceManagementClient(serviceCreds);
resourceManagementClient.SubscriptionId = subscriptionId;
// Create or check that resource group exists
EnsureResourceGroupExists(resourceManagementClient, resourceGroupName, resourceGroupLocation);
// Start a deployment
DeployTemplate(resourceManagementClient, resourceGroupName, deploymentName, templateFileContents, parameterFileContents);
}
/// <summary>
/// Reads a JSON file from the specified path
/// </summary>
/// <param name="pathToJson">The full path to the JSON file</param>
/// <returns>The JSON file contents</returns>
private JObject GetJsonFileContents(string pathToJson)
{
JObject templatefileContent = new JObject();
using (StreamReader file = File.OpenText(pathToJson))
{
using (JsonTextReader reader = new JsonTextReader(file))
{
templatefileContent = (JObject)JToken.ReadFrom(reader);
return templatefileContent;
}
}
}
/// <summary>
/// Ensures that a resource group with the specified name exists. If it does not, will attempt to create one.
/// </summary>
/// <param name="resourceManagementClient">The resource manager client.</param>
/// <param name="resourceGroupName">The name of the resource group.</param>
/// <param name="resourceGroupLocation">The resource group location. Required when creating a new resource group.</param>
private static void EnsureResourceGroupExists(ResourceManagementClient resourceManagementClient, string resourceGroupName, string resourceGroupLocation)
{
if (resourceManagementClient.ResourceGroups.CheckExistence(resourceGroupName) != true)
{
Console.WriteLine(string.Format("Creating resource group '{0}' in location '{1}'", resourceGroupName, resourceGroupLocation));
var resourceGroup = new ResourceGroup();
resourceGroup.Location = resourceGroupLocation;
resourceManagementClient.ResourceGroups.CreateOrUpdate(resourceGroupName, resourceGroup);
}
else
{
Console.WriteLine(string.Format("Using existing resource group '{0}'", resourceGroupName));
}
}
/// <summary>
/// Starts a template deployment.
/// </summary>
/// <param name="resourceManagementClient">The resource manager client.</param>
/// <param name="resourceGroupName">The name of the resource group.</param>
/// <param name="deploymentName">The name of the deployment.</param>
/// <param name="templateFileContents">The template file contents.</param>
/// <param name="parameterFileContents">The parameter file contents.</param>
private static void DeployTemplate(ResourceManagementClient resourceManagementClient, string resourceGroupName, string deploymentName, JObject templateFileContents, JObject parameterFileContents)
{
Console.WriteLine(string.Format("Starting template deployment '{0}' in resource group '{1}'", deploymentName, resourceGroupName));
var deployment = new Deployment();
deployment.Properties = new DeploymentProperties
{
Mode = DeploymentMode.Incremental,
Template = templateFileContents,
Parameters = parameterFileContents["parameters"].ToObject<JObject>()
};
var deploymentResult = resourceManagementClient.Deployments.CreateOrUpdate(resourceGroupName, deploymentName, deployment);
Console.WriteLine(string.Format("Deployment status: {0}", deploymentResult.Properties.ProvisioningState));
}
}
}

Просмотреть файл

@ -1,107 +0,0 @@
<#
.SYNOPSIS
Deploys a template to Azure
.DESCRIPTION
Deploys an Azure Resource Manager template
.PARAMETER subscriptionId
The subscription id where the template will be deployed.
.PARAMETER resourceGroupName
The resource group where the template will be deployed. Can be the name of an existing or a new resource group.
.PARAMETER resourceGroupLocation
Optional, a resource group location. If specified, will try to create a new resource group in this location. If not specified, assumes resource group is existing.
.PARAMETER deploymentName
The deployment name.
.PARAMETER templateFilePath
Optional, path to the template file. Defaults to template.json.
.PARAMETER parametersFilePath
Optional, path to the parameters file. Defaults to parameters.json. If file is not found, will prompt for parameter values based on template.
#>
param(
[Parameter(Mandatory=$True)]
[string]
$subscriptionId,
[Parameter(Mandatory=$True)]
[string]
$resourceGroupName,
[string]
$resourceGroupLocation,
[Parameter(Mandatory=$True)]
[string]
$deploymentName,
[string]
$templateFilePath = "template.json",
[string]
$parametersFilePath = "parameters.json"
)
<#
.SYNOPSIS
Registers RPs
#>
Function RegisterRP {
Param(
[string]$ResourceProviderNamespace
)
Write-Host "Registering resource provider '$ResourceProviderNamespace'";
Register-AzureRmResourceProvider -ProviderNamespace $ResourceProviderNamespace;
}
#******************************************************************************
# Script body
# Execution begins here
#******************************************************************************
$ErrorActionPreference = "Stop"
# sign in
Write-Host "Logging in...";
Login-AzureRmAccount;
# select subscription
Write-Host "Selecting subscription '$subscriptionId'";
Select-AzureRmSubscription -SubscriptionID $subscriptionId;
# Register RPs
$resourceProviders = @("microsoft.machinelearningservices","microsoft.storage","microsoft.containerregistry","microsoft.keyvault","microsoft.insights");
if($resourceProviders.length) {
Write-Host "Registering resource providers"
foreach($resourceProvider in $resourceProviders) {
RegisterRP($resourceProvider);
}
}
#Create or check for existing resource group
$resourceGroup = Get-AzureRmResourceGroup -Name $resourceGroupName -ErrorAction SilentlyContinue
if(!$resourceGroup)
{
Write-Host "Resource group '$resourceGroupName' does not exist. To create a new resource group, please enter a location.";
if(!$resourceGroupLocation) {
$resourceGroupLocation = Read-Host "resourceGroupLocation";
}
Write-Host "Creating resource group '$resourceGroupName' in location '$resourceGroupLocation'";
New-AzureRmResourceGroup -Name $resourceGroupName -Location $resourceGroupLocation
}
else{
Write-Host "Using existing resource group '$resourceGroupName'";
}
# Start the deployment
Write-Host "Starting deployment...";
if(Test-Path $parametersFilePath) {
New-AzureRmResourceGroupDeployment -ResourceGroupName $resourceGroupName -Name $deploymentName -TemplateFile $templateFilePath -TemplateParameterFile $parametersFilePath;
} else {
New-AzureRmResourceGroupDeployment -ResourceGroupName $resourceGroupName -Name $deploymentName -TemplateFile $templateFilePath;
}

Просмотреть файл

@ -1,122 +0,0 @@
#!/bin/bash
set -euo pipefail
IFS=$'\n\t'
# -e: immediately exit if any command has a non-zero exit status
# -o: prevents errors in a pipeline from being masked
# IFS new value is less likely to cause confusing bugs when looping arrays or arguments (e.g. $@)
usage() { echo "Usage: $0 -i <subscriptionId> -g <resourceGroupName> -n <deploymentName> -l <resourceGroupLocation>" 1>&2; exit 1; }
declare subscriptionId=""
declare resourceGroupName=""
declare deploymentName=""
declare resourceGroupLocation=""
# Initialize parameters specified from command line
while getopts ":i:g:n:l:" arg; do
case "${arg}" in
i)
subscriptionId=${OPTARG}
;;
g)
resourceGroupName=${OPTARG}
;;
n)
deploymentName=${OPTARG}
;;
l)
resourceGroupLocation=${OPTARG}
;;
esac
done
shift $((OPTIND-1))
#Prompt for parameters if some required parameters are missing
if [[ -z "$subscriptionId" ]]; then
echo "Your subscription ID can be looked up with the CLI using: az account show --out json "
echo "Enter your subscription ID:"
read subscriptionId
[[ "${subscriptionId:?}" ]]
fi
if [[ -z "$resourceGroupName" ]]; then
echo "This script will look for an existing resource group, otherwise a new one will be created "
echo "You can create new resource groups with the CLI using: az group create "
echo "Enter a resource group name"
read resourceGroupName
[[ "${resourceGroupName:?}" ]]
fi
if [[ -z "$deploymentName" ]]; then
echo "Enter a name for this deployment:"
read deploymentName
fi
if [[ -z "$resourceGroupLocation" ]]; then
echo "If creating a *new* resource group, you need to set a location "
echo "You can lookup locations with the CLI using: az account list-locations "
echo "Enter resource group location:"
read resourceGroupLocation
fi
#templateFile Path - template file to be used
templateFilePath="template.json"
if [ ! -f "$templateFilePath" ]; then
echo "$templateFilePath not found"
exit 1
fi
#parameter file path
parametersFilePath="parameters.json"
if [ ! -f "$parametersFilePath" ]; then
echo "$parametersFilePath not found"
exit 1
fi
if [ -z "$subscriptionId" ] || [ -z "$resourceGroupName" ] || [ -z "$deploymentName" ]; then
echo "Either one of subscriptionId, resourceGroupName, deploymentName is empty"
usage
fi
#login to azure using your credentials; the if-guard keeps "set -e" from
#aborting the script before az login can run when there is no active session
if ! az account show 1> /dev/null; then
	az login
fi
#set the default subscription id
az account set --subscription $subscriptionId
set +e
#Check for existing RG
az group show --name $resourceGroupName 1> /dev/null
if [ $? != 0 ]; then
echo "Resource group with name" $resourceGroupName "could not be found. Creating new resource group.."
set -e
(
set -x
az group create --name $resourceGroupName --location $resourceGroupLocation 1> /dev/null
)
else
echo "Using existing resource group..."
fi
#Start deployment
echo "Starting deployment..."
(
set -x
az group deployment create --name "$deploymentName" --resource-group "$resourceGroupName" --template-file "$templateFilePath" --parameters "@${parametersFilePath}"
)
if [ $? == 0 ];
then
echo "Template has been successfully deployed"
fi

Просмотреть файл

@ -1,71 +0,0 @@
require 'azure_mgmt_resources'
class Deployer
# Initialize the deployer class with subscription, resource group and resource group location. The class will raise an
# ArgumentError if 'template.json' or 'parameters.json' is missing from the current directory; the AZURE_TENANT_ID, AZURE_CLIENT_ID and AZURE_CLIENT_SECRET environment variables must be set for authentication.
#
# @param [String] subscription_id the subscription to deploy the template
# @param [String] resource_group the resource group to create or update and then deploy the template
# @param [String] resource_group_location the location of the resource group
def initialize(subscription_id, resource_group, resource_group_location)
raise ArgumentError.new("Missing template file 'template.json' in current directory.") unless File.exist?('template.json')
raise ArgumentError.new("Missing parameters file 'parameters.json' in current directory.") unless File.exist?('parameters.json')
@resource_group = resource_group
@subscription_id = subscription_id
@resource_group_location = resource_group_location
provider = MsRestAzure::ApplicationTokenProvider.new(
ENV['AZURE_TENANT_ID'],
ENV['AZURE_CLIENT_ID'],
ENV['AZURE_CLIENT_SECRET'])
credentials = MsRest::TokenCredentials.new(provider)
@client = Azure::ARM::Resources::ResourceManagementClient.new(credentials)
@client.subscription_id = @subscription_id
end
# Deploy the template to a resource group
def deploy
# ensure the resource group is created
params = Azure::ARM::Resources::Models::ResourceGroup.new.tap do |rg|
rg.location = @resource_group_location
end
@client.resource_groups.create_or_update(@resource_group, params).value!
# build the deployment from a json file template from parameters
template = File.read(File.expand_path(File.join(__dir__, 'template.json')))
deployment = Azure::ARM::Resources::Models::Deployment.new
deployment.properties = Azure::ARM::Resources::Models::DeploymentProperties.new
deployment.properties.template = JSON.parse(template)
deployment.properties.mode = Azure::ARM::Resources::Models::DeploymentMode::Incremental
# build the deployment template parameters from Hash to {key: {value: value}} format
deploy_params = File.read(File.expand_path(File.join(__dir__, 'parameters.json')))
deployment.properties.parameters = JSON.parse(deploy_params)["parameters"]
# put the deployment to the resource group
@client.deployments.create_or_update(@resource_group, 'azure-sample', deployment)
end
end
# Get user inputs and execute the script
if(ARGV.empty?)
puts "Please specify subscriptionId resourceGroupName resourceGroupLocation as command line arguments"
exit
end
subscription_id = ARGV[0] # Azure Subscription Id
resource_group = ARGV[1] # The resource group for deployment
resource_group_location = ARGV[2] # The resource group location
msg = "\nInitializing the Deployer class with subscription id: #{subscription_id}, resource group: #{resource_group}"
msg += "\nand resource group location: #{resource_group_location}...\n\n"
puts msg
# Initialize the deployer class
deployer = Deployer.new(subscription_id, resource_group, resource_group_location)
puts "Beginning the deployment... \n\n"
# Deploy the template
deployment = deployer.deploy
puts "Done deploying!!"

Просмотреть файл

@ -1,33 +0,0 @@
{
"$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentParameters.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"appInsightsId": {
"value": ""
},
"containerRegistryId": {
"value": ""
},
"description": {
"value": ""
},
"friendlyName": {
"value": ""
},
"keyVaultId": {
"value": ""
},
"location": {
"value": "westeurope"
},
"machineLearningApiVersion": {
"value": "2018-11-19"
},
"name": {
"value": ""
},
"storageAccountId": {
"value": ""
}
}
}

Просмотреть файл

@ -1,5 +0,0 @@
# register datastore
az ml datastore register-blob -n $datastorename -a $accountname -k $accountkey -c $containername
# set default datastore
az ml datastore set-default -n $datastorename
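
The same registration can be done from the Python SDK; a minimal sketch (datastore, container, and storage account names are placeholders):

# Minimal sketch: register a blob container as a datastore and make it the default.
from azureml.core import Workspace, Datastore
from azureml.core.authentication import AzureCliAuthentication

ws = Workspace.from_config(auth=AzureCliAuthentication())

datastore = Datastore.register_azure_blob_container(
    workspace=ws,
    datastore_name='myblobdatastore',   # placeholder
    container_name='data',              # placeholder
    account_name='mystorageaccount',    # placeholder
    account_key='<account-key>')        # keep secrets out of source control

ws.set_default_datastore('myblobdatastore')
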

Просмотреть файл

Просмотреть файл

@ -1,53 +0,0 @@
{
"$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"machineLearningApiVersion": {
"type": "string"
},
"description": {
"type": "string"
},
"friendlyName": {
"type": "string"
},
"location": {
"type": "string"
},
"name": {
"type": "string"
},
"storageAccountId": {
"type": "string"
},
"appInsightsId": {
"type": "string"
},
"containerRegistryId": {
"type": "string"
},
"keyVaultId": {
"type": "string"
}
},
"resources": [
{
"name": "[parameters('name')]",
"type": "Microsoft.MachineLearningServices/workspaces",
"apiVersion": "2018-11-19",
"location": "[parameters('location')]",
"identity": {
"type": "systemAssigned"
},
"tags": {},
"properties": {
"description": "[parameters('description')]",
"friendlyName": "[parameters('friendlyName')]",
"keyVault": "[parameters('keyVaultId')]",
"applicationInsights": "[parameters('appInsightsId')]",
"containerRegistry": "[parameters('containerRegistryId')]",
"storageAccount": "[parameters('storageAccountId')]"
}
}
]
}
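
Alternatively, the workspace can be created directly from the Python SDK, which can create or attach the dependent resources itself; a rough sketch (all names and the subscription id are placeholders):

# Minimal sketch: create an Azure ML workspace via the Python SDK.
from azureml.core import Workspace
from azureml.core.authentication import AzureCliAuthentication

ws = Workspace.create(
    name='<workspace-name>',
    subscription_id='<subscription-id>',
    resource_group='<resource-group>',
    location='westeurope',
    create_resource_group=False,
    auth=AzureCliAuthentication(),
    exist_ok=True)

ws.write_config()  # writes .azureml/config.json for later Workspace.from_config() calls
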

Просмотреть файл

@ -1,115 +0,0 @@
// Requires the following Azure NuGet packages and related dependencies:
// package id="Microsoft.Azure.Management.Authorization" version="2.0.0"
// package id="Microsoft.Azure.Management.ResourceManager" version="1.4.0-preview"
// package id="Microsoft.Rest.ClientRuntime.Azure.Authentication" version="2.2.8-preview"
using Microsoft.Azure.Management.ResourceManager;
using Microsoft.Azure.Management.ResourceManager.Models;
using Microsoft.Rest.Azure.Authentication;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using System;
using System.IO;
namespace PortalGenerated
{
/// <summary>
/// This is a helper class for deploying an Azure Resource Manager template
/// More info about template deployments can be found here https://go.microsoft.com/fwLink/?LinkID=733371
/// </summary>
class DeploymentHelper
{
string subscriptionId = "your-subscription-id";
string clientId = "your-service-principal-clientId";
string clientSecret = "your-service-principal-client-secret";
string resourceGroupName = "resource-group-name";
string deploymentName = "deployment-name";
string resourceGroupLocation = "resource-group-location"; // must be specified for creating a new resource group
string pathToTemplateFile = "path-to-template.json-on-disk";
string pathToParameterFile = "path-to-parameters.json-on-disk";
string tenantId = "tenant-id";
public async void Run()
{
// Try to obtain the service credentials
var serviceCreds = await ApplicationTokenProvider.LoginSilentAsync(tenantId, clientId, clientSecret);
// Read the template and parameter file contents
JObject templateFileContents = GetJsonFileContents(pathToTemplateFile);
JObject parameterFileContents = GetJsonFileContents(pathToParameterFile);
// Create the resource manager client
var resourceManagementClient = new ResourceManagementClient(serviceCreds);
resourceManagementClient.SubscriptionId = subscriptionId;
// Create or check that resource group exists
EnsureResourceGroupExists(resourceManagementClient, resourceGroupName, resourceGroupLocation);
// Start a deployment
DeployTemplate(resourceManagementClient, resourceGroupName, deploymentName, templateFileContents, parameterFileContents);
}
/// <summary>
/// Reads a JSON file from the specified path
/// </summary>
/// <param name="pathToJson">The full path to the JSON file</param>
/// <returns>The JSON file contents</returns>
private JObject GetJsonFileContents(string pathToJson)
{
JObject templatefileContent = new JObject();
using (StreamReader file = File.OpenText(pathToJson))
{
using (JsonTextReader reader = new JsonTextReader(file))
{
templatefileContent = (JObject)JToken.ReadFrom(reader);
return templatefileContent;
}
}
}
/// <summary>
/// Ensures that a resource group with the specified name exists. If it does not, will attempt to create one.
/// </summary>
/// <param name="resourceManagementClient">The resource manager client.</param>
/// <param name="resourceGroupName">The name of the resource group.</param>
/// <param name="resourceGroupLocation">The resource group location. Required when creating a new resource group.</param>
private static void EnsureResourceGroupExists(ResourceManagementClient resourceManagementClient, string resourceGroupName, string resourceGroupLocation)
{
if (resourceManagementClient.ResourceGroups.CheckExistence(resourceGroupName) != true)
{
Console.WriteLine(string.Format("Creating resource group '{0}' in location '{1}'", resourceGroupName, resourceGroupLocation));
var resourceGroup = new ResourceGroup();
resourceGroup.Location = resourceGroupLocation;
resourceManagementClient.ResourceGroups.CreateOrUpdate(resourceGroupName, resourceGroup);
}
else
{
Console.WriteLine(string.Format("Using existing resource group '{0}'", resourceGroupName));
}
}
/// <summary>
/// Starts a template deployment.
/// </summary>
/// <param name="resourceManagementClient">The resource manager client.</param>
/// <param name="resourceGroupName">The name of the resource group.</param>
/// <param name="deploymentName">The name of the deployment.</param>
/// <param name="templateFileContents">The template file contents.</param>
/// <param name="parameterFileContents">The parameter file contents.</param>
private static void DeployTemplate(ResourceManagementClient resourceManagementClient, string resourceGroupName, string deploymentName, JObject templateFileContents, JObject parameterFileContents)
{
Console.WriteLine(string.Format("Starting template deployment '{0}' in resource group '{1}'", deploymentName, resourceGroupName));
var deployment = new Deployment();
deployment.Properties = new DeploymentProperties
{
Mode = DeploymentMode.Incremental,
Template = templateFileContents,
Parameters = parameterFileContents["parameters"].ToObject<JObject>()
};
var deploymentResult = resourceManagementClient.Deployments.CreateOrUpdate(resourceGroupName, deploymentName, deployment);
Console.WriteLine(string.Format("Deployment status: {0}", deploymentResult.Properties.ProvisioningState));
}
}
}

Просмотреть файл

@ -1,107 +0,0 @@
<#
.SYNOPSIS
Deploys a template to Azure
.DESCRIPTION
Deploys an Azure Resource Manager template
.PARAMETER subscriptionId
The subscription id where the template will be deployed.
.PARAMETER resourceGroupName
The resource group where the template will be deployed. Can be the name of an existing or a new resource group.
.PARAMETER resourceGroupLocation
Optional, a resource group location. If specified, will try to create a new resource group in this location. If not specified, assumes resource group is existing.
.PARAMETER deploymentName
The deployment name.
.PARAMETER templateFilePath
Optional, path to the template file. Defaults to template.json.
.PARAMETER parametersFilePath
Optional, path to the parameters file. Defaults to parameters.json. If file is not found, will prompt for parameter values based on template.
#>
param(
[Parameter(Mandatory=$True)]
[string]
$subscriptionId,
[Parameter(Mandatory=$True)]
[string]
$resourceGroupName,
[string]
$resourceGroupLocation,
[Parameter(Mandatory=$True)]
[string]
$deploymentName,
[string]
$templateFilePath = "template.json",
[string]
$parametersFilePath = "parameters.json"
)
<#
.SYNOPSIS
Registers RPs
#>
Function RegisterRP {
Param(
[string]$ResourceProviderNamespace
)
Write-Host "Registering resource provider '$ResourceProviderNamespace'";
Register-AzureRmResourceProvider -ProviderNamespace $ResourceProviderNamespace;
}
#******************************************************************************
# Script body
# Execution begins here
#******************************************************************************
$ErrorActionPreference = "Stop"
# sign in
Write-Host "Logging in...";
Login-AzureRmAccount;
# select subscription
Write-Host "Selecting subscription '$subscriptionId'";
Select-AzureRmSubscription -SubscriptionID $subscriptionId;
# Register RPs
$resourceProviders = @("microsoft.storage");
if($resourceProviders.length) {
Write-Host "Registering resource providers"
foreach($resourceProvider in $resourceProviders) {
RegisterRP($resourceProvider);
}
}
#Create or check for existing resource group
$resourceGroup = Get-AzureRmResourceGroup -Name $resourceGroupName -ErrorAction SilentlyContinue
if(!$resourceGroup)
{
Write-Host "Resource group '$resourceGroupName' does not exist. To create a new resource group, please enter a location.";
if(!$resourceGroupLocation) {
$resourceGroupLocation = Read-Host "resourceGroupLocation";
}
Write-Host "Creating resource group '$resourceGroupName' in location '$resourceGroupLocation'";
New-AzureRmResourceGroup -Name $resourceGroupName -Location $resourceGroupLocation
}
else{
Write-Host "Using existing resource group '$resourceGroupName'";
}
# Start the deployment
Write-Host "Starting deployment...";
if(Test-Path $parametersFilePath) {
  New-AzureRmResourceGroupDeployment -ResourceGroupName $resourceGroupName -Name $deploymentName -TemplateFile $templateFilePath -TemplateParameterFile $parametersFilePath;
} else {
  New-AzureRmResourceGroupDeployment -ResourceGroupName $resourceGroupName -Name $deploymentName -TemplateFile $templateFilePath;
}

Просмотреть файл

@ -1,122 +0,0 @@
#!/bin/bash
set -euo pipefail
IFS=$'\n\t'
# -e: immediately exit if any command has a non-zero exit status
# -o: prevents errors in a pipeline from being masked
# IFS new value is less likely to cause confusing bugs when looping arrays or arguments (e.g. $@)
usage() { echo "Usage: $0 -i <subscriptionId> -g <resourceGroupName> -n <deploymentName> -l <resourceGroupLocation>" 1>&2; exit 1; }
declare subscriptionId=""
declare resourceGroupName=""
declare deploymentName=""
declare resourceGroupLocation=""
# Initialize parameters specified from command line
while getopts ":i:g:n:l:" arg; do
case "${arg}" in
i)
subscriptionId=${OPTARG}
;;
g)
resourceGroupName=${OPTARG}
;;
n)
deploymentName=${OPTARG}
;;
l)
resourceGroupLocation=${OPTARG}
;;
esac
done
shift $((OPTIND-1))
#Prompt for parameters if some required parameters are missing
if [[ -z "$subscriptionId" ]]; then
echo "Your subscription ID can be looked up with the CLI using: az account show --out json "
echo "Enter your subscription ID:"
read subscriptionId
[[ "${subscriptionId:?}" ]]
fi
if [[ -z "$resourceGroupName" ]]; then
echo "This script will look for an existing resource group, otherwise a new one will be created "
echo "You can create new resource groups with the CLI using: az group create "
echo "Enter a resource group name"
read resourceGroupName
[[ "${resourceGroupName:?}" ]]
fi
if [[ -z "$deploymentName" ]]; then
echo "Enter a name for this deployment:"
read deploymentName
fi
if [[ -z "$resourceGroupLocation" ]]; then
echo "If creating a *new* resource group, you need to set a location "
echo "You can lookup locations with the CLI using: az account list-locations "
echo "Enter resource group location:"
read resourceGroupLocation
fi
#templateFile Path - template file to be used
templateFilePath="template.json"
if [ ! -f "$templateFilePath" ]; then
echo "$templateFilePath not found"
exit 1
fi
#parameter file path
parametersFilePath="parameters.json"
if [ ! -f "$parametersFilePath" ]; then
echo "$parametersFilePath not found"
exit 1
fi
if [ -z "$subscriptionId" ] || [ -z "$resourceGroupName" ] || [ -z "$deploymentName" ]; then
echo "Either one of subscriptionId, resourceGroupName, deploymentName is empty"
usage
fi
#login to azure using your credentials; the if-guard keeps "set -e" from
#aborting the script before az login can run when there is no active session
if ! az account show 1> /dev/null; then
	az login
fi
#set the default subscription id
az account set --subscription $subscriptionId
set +e
#Check for existing RG
az group show --name $resourceGroupName 1> /dev/null
if [ $? != 0 ]; then
echo "Resource group with name" $resourceGroupName "could not be found. Creating new resource group.."
set -e
(
set -x
az group create --name $resourceGroupName --location $resourceGroupLocation 1> /dev/null
)
else
echo "Using existing resource group..."
fi
#Start deployment
echo "Starting deployment..."
(
set -x
az group deployment create --name "$deploymentName" --resource-group "$resourceGroupName" --template-file "$templateFilePath" --parameters "@${parametersFilePath}"
)
if [ $? == 0 ];
then
echo "Template has been successfully deployed"
fi

Просмотреть файл

@ -1,71 +0,0 @@
require 'azure_mgmt_resources'
class Deployer
# Initialize the deployer class with subscription, resource group and resource group location. The class will raise an
# ArgumentError if 'template.json' or 'parameters.json' is missing from the current directory; the AZURE_TENANT_ID, AZURE_CLIENT_ID and AZURE_CLIENT_SECRET environment variables must be set for authentication.
#
# @param [String] subscription_id the subscription to deploy the template
# @param [String] resource_group the resource group to create or update and then deploy the template
# @param [String] resource_group_location the location of the resource group
def initialize(subscription_id, resource_group, resource_group_location)
raise ArgumentError.new("Missing template file 'template.json' in current directory.") unless File.exist?('template.json')
raise ArgumentError.new("Missing parameters file 'parameters.json' in current directory.") unless File.exist?('parameters.json')
@resource_group = resource_group
@subscription_id = subscription_id
@resource_group_location = resource_group_location
provider = MsRestAzure::ApplicationTokenProvider.new(
ENV['AZURE_TENANT_ID'],
ENV['AZURE_CLIENT_ID'],
ENV['AZURE_CLIENT_SECRET'])
credentials = MsRest::TokenCredentials.new(provider)
@client = Azure::ARM::Resources::ResourceManagementClient.new(credentials)
@client.subscription_id = @subscription_id
end
# Deploy the template to a resource group
def deploy
# ensure the resource group is created
params = Azure::ARM::Resources::Models::ResourceGroup.new.tap do |rg|
rg.location = @resource_group_location
end
@client.resource_groups.create_or_update(@resource_group, params).value!
# build the deployment from a json file template from parameters
template = File.read(File.expand_path(File.join(__dir__, 'template.json')))
deployment = Azure::ARM::Resources::Models::Deployment.new
deployment.properties = Azure::ARM::Resources::Models::DeploymentProperties.new
deployment.properties.template = JSON.parse(template)
deployment.properties.mode = Azure::ARM::Resources::Models::DeploymentMode::Incremental
# build the deployment template parameters from Hash to {key: {value: value}} format
deploy_params = File.read(File.expand_path(File.join(__dir__, 'parameters.json')))
deployment.properties.parameters = JSON.parse(deploy_params)["parameters"]
# put the deployment to the resource group
@client.deployments.create_or_update(@resource_group, 'azure-sample', deployment)
end
end
# Get user inputs and execute the script
if(ARGV.empty?)
puts "Please specify subscriptionId resourceGroupName resourceGroupLocation as command line arguments"
exit
end
subscription_id = ARGV[0] # Azure Subscription Id
resource_group = ARGV[1] # The resource group for deployment
resource_group_location = ARGV[2] # The resource group location
msg = "\nInitializing the Deployer class with subscription id: #{subscription_id}, resource group: #{resource_group}"
msg += "\nand resource group location: #{resource_group_location}...\n\n"
puts msg
# Initialize the deployer class
deployer = Deployer.new(subscription_id, resource_group, resource_group_location)
puts "Beginning the deployment... \n\n"
# Deploy the template
deployment = deployer.deploy
puts "Done deploying!!"

Просмотреть файл

@ -1,24 +0,0 @@
{
"$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentParameters.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"location": {
"value": "northeurope"
},
"storageAccountName": {
"value": "deeikelestorage"
},
"accountType": {
"value": "Standard_RAGRS"
},
"kind": {
"value": "StorageV2"
},
"accessTier": {
"value": "Hot"
},
"supportsHttpsTrafficOnly": {
"value": true
}
}
}

Просмотреть файл

@ -1,48 +0,0 @@
{
"$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"location": {
"type": "String"
},
"storageAccountName": {
"type": "String"
},
"accountType": {
"type": "String"
},
"kind": {
"type": "String"
},
"accessTier": {
"type": "String"
},
"supportsHttpsTrafficOnly": {
"type": "Bool"
}
},
"variables": {},
"resources": [
{
"type": "Microsoft.Storage/storageAccounts",
"sku": {
"name": "[parameters('accountType')]"
},
"kind": "[parameters('kind')]",
"name": "[parameters('storageAccountName')]",
"apiVersion": "2018-07-01",
"location": "[parameters('location')]",
"properties": {
"accessTier": "[parameters('accessTier')]",
"supportsHttpsTrafficOnly": "[parameters('supportsHttpsTrafficOnly')]"
},
"dependsOn": []
}
],
"outputs": {
"resourceId": {
"value": "[resourceId('Microsoft.Storage/storageAccounts', parameters('storageAccountName'))]",
"type": "string"
}
}
}
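
For completeness, a rough Python counterpart using the (pre-track-2) azure-mgmt-storage management SDK, reusing the Azure CLI login for credentials; the resource group and account names are placeholders:

# Minimal sketch: create the storage account with the pre-track-2 azure-mgmt-storage SDK.
from azure.common.client_factory import get_client_from_cli_profile
from azure.mgmt.storage import StorageManagementClient
from azure.mgmt.storage.models import Kind, Sku, StorageAccountCreateParameters

client = get_client_from_cli_profile(StorageManagementClient)

poller = client.storage_accounts.create(
    '<resource-group>',
    '<storageaccountname>',
    StorageAccountCreateParameters(
        sku=Sku(name='Standard_RAGRS'),
        kind=Kind.storage_v2,
        location='northeurope',
        enable_https_traffic_only=True))
print(poller.result().provisioning_state)
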

Просмотреть файл

@ -1,4 +0,0 @@
Column Index,Column Name,Type of Variable,"Values (range, levels, examples, etc)",Short Description,Joining Keys with other datasets?
1,,,,,
2,,,,,
3,,,,,

Просмотреть файл

@ -1,17 +0,0 @@
# Data Dictionaries
_Place to put data description documents, typically received from a client_
This is typically a field-level description of data files received.
This document provides the descriptions of the data that is provided by the client. If the client is providing data dictionaries in text (in emails or text files), directly copy them here, or have a snapshot of the text, and add it here as an image. If the client is providing data dictionaries in Excel worksheets, directly put the Excel files in this directory, and add a link to this Excel file.
If the client provides the data from a database-like data management system, you can also copy and paste the data schema (snapshot) here. If necessary, also provide a brief description of each column below the snapshot image, if the image does not already contain that information.
## <Dataset 1 name (from database)\>
_Example image of data schema when data is from a sql server_
![](data-dictionary-from-sql-table.PNG)
## <Dataset 2 name (dictionary in Excel file)\>
[dataset 2 with dictionary in Excel](./Raw-Data-Dictionary.csv)

Двоичный файл не отображается.


Просмотреть файл

@ -1,43 +0,0 @@
# Data and Feature Definitions
This document provides a central hub for the raw data sources, the processed/transformed data, and the feature sets. More details on each dataset are provided in the data summary report.
For each dataset, an individual report is provided describing the data schema, the meaning of each data field, and other information that is helpful for understanding the data. If a dataset is the output of processing/transforming/feature engineering existing dataset(s), the names of the input dataset(s) and links to the scripts used to produce it are also provided.
When applicable, the Interactive Data Exploration, Analysis, and Reporting (IDEAR) utility developed by Microsoft is applied to explore and visualize the data and generate the data report. Instructions on how to use IDEAR can be found [here]().
For each dataset, the links to the sample datasets in the _**Data**_ directory are also provided.
_**For ease of modifying this report, placeholder links are included in this page, for example a link to dataset 1, but they are just placeholders pointing to a non-existent page. These should be modified to point to the actual location.**_
## Raw Data Sources
| Dataset Name | Original Location | Destination Location | Data Movement Tools / Scripts | Link to Report |
| ---:| ---: | ---: | ---: | -----: |
| Dataset 1 | Brief description of its original location | Brief description of its destination location | [script1.py](link/to/python/script/file/in/Code) | [Dataset 1 Report](link/to/report1)|
| Dataset 2 | Brief description of its original location | Brief description of its destination location | [script2.R](link/to/R/script/file/in/Code) | [Dataset 2 Report](link/to/report2)|
* Dataset1 summary. <Provide brief summary of the data, such as how to access the data. More detailed information should be in the Dataset1 Report.>
* Dataset2 summary. <Provide brief summary of the data, such as how to access the data. More detailed information should be in the Dataset2 Report.>
## Processed Data
| Processed Dataset Name | Input Dataset(s) | Data Processing Tools/Scripts | Link to Report |
| ---:| ---: | ---: | ---: |
| Processed Dataset 1 | [Dataset1](link/to/dataset1/report), [Dataset2](link/to/dataset2/report) | [Python_Script1.py](link/to/python/script/file/in/Code) | [Processed Dataset 1 Report](link/to/report1)|
| Processed Dataset 2 | [Dataset2](link/to/dataset2/report) |[script2.R](link/to/R/script/file/in/Code) | [Processed Dataset 2 Report](link/to/report2)|
* Processed Data1 summary. <Provide brief summary of the processed data, such as why you want to process data in this way. More detailed information about the processed data should be in the Processed Data1 Report.>
* Processed Data2 summary. <Provide brief summary of the processed data, such as why you want to process data in this way. More detailed information about the processed data should be in the Processed Data2 Report.>
## Feature Sets
| Feature Set Name | Input Dataset(s) | Feature Engineering Tools/Scripts | Link to Report |
| ---:| ---: | ---: | ---: |
| Feature Set 1 | [Dataset1](link/to/dataset1/report), [Processed Dataset2](link/to/dataset2/report) | [R_Script2.R](link/to/R/script/file/in/Code) | [Feature Set1 Report](link/to/report1)|
| Feature Set 2 | [Processed Dataset2](link/to/dataset2/report) |[SQL_Script2.sql](link/to/sql/script/file/in/Code) | [Feature Set2 Report](link/to/report2)|
* Feature Set1 summary. <Provide detailed description of the feature set, such as the meaning of each feature. More detailed information about the feature set should be in the Feature Set1 Report.>
* Feature Set2 summary. <Provide detailed description of the feature set, such as the meaning of each feature. More detailed information about the feature set should be in the Feature Set2 Report.>

Просмотреть файл

@ -1,3 +0,0 @@
# Data Pipeline
Describe the data pipeline and provide a logical diagram. List how frequently the data is moved - real time/stream, near real time, batched with frequency etc.

Просмотреть файл

@ -1,18 +0,0 @@
# Data Report
This file will be generated for each data file received or processed. The Interactive Data Exploration, Analysis, and Reporting (IDEAR) utility developed by the TDSP team at Microsoft can help you explore and visualize the data interactively, and generate the data report as part of the exploration and visualization process.
IDEAR allows you to output the data summary, statistics, and charts that you want to use to tell the data story into the report. You only need to click a few buttons, and the report will be generated for you.
## General summary of the data
## Data quality summary
## Target variable
## Individual variables
## Variable ranking
## Relationship between explanatory variables and target variable

Просмотреть файл

@ -1,2 +0,0 @@
# DataReport Folder
_Location to place documents describing results of data exploration_

Просмотреть файл

@ -1,46 +0,0 @@
# Baseline Model Report
_The baseline model is the model a data scientist trains and evaluates quickly once the first (preliminary) feature set is ready for machine learning modeling. Building the baseline model gives the data scientist a quick assessment of the feasibility of the machine learning task._
When applicable, the Automated Modeling and Reporting utility developed by the TDSP team at Microsoft is employed to build the baseline models quickly. The baseline model report can be generated easily from this utility.
> If using the Automated Modeling and Reporting tool, most of the sections below will be generated automatically from this tool.
## Analytic Approach
* What is target definition
* What are inputs (description)
* What kind of model was built?
## Model Description
* Models and Parameters
* Description or images of data flow graph
* if AzureML, link to:
* Training experiment
* Scoring workflow
* What learner(s) were used?
* Learner hyper-parameters
## Results (Model Performance)
* ROC/Lift charts, AUC, R^2, MAPE as appropriate
* Performance graphs for parameters sweeps if applicable
## Model Understanding
* Variable Importance (significance)
* Insight Derived from the Model
## Conclusion and Discussions for Next Steps
* Conclusion on Feasibility Assessment of the Machine Learning Task
* Discussion on Overfitting (If Applicable)
* What other Features Can Be Generated from the Current Data
* What other Relevant Data Sources Are Available to Help the Modeling

Просмотреть файл

@ -1,34 +0,0 @@
# Final Model Report
_Report describing the final model to be delivered - typically comprised of one or more of the models built during the life of the project_
## Analytic Approach
* What is target definition
* What are inputs (description)
* What kind of model was built?
## Solution Description
* Simple solution architecture (Data sources, solution components, data flow)
* What is output?
## Data
* Source
* Data Schema
* Sampling
* Selection (dates, segments)
* Stats (counts)
## Features
* List of raw and derived features
* Importance ranking.
## Algorithm
* Description or images of data flow graph
* if AzureML, link to:
* Training experiment
* Scoring workflow
* What learner(s) were used?
* Learner hyper-parameters
## Results
* ROC/Lift charts, AUC, R^2, MAPE as appropriate
* Performance graphs for parameters sweeps if applicable

Просмотреть файл

@ -1,42 +0,0 @@
# Model Report
_A report to provide details on a specific experiment (model) - possibly one of many_
If applicable, the Automated Modeling and Reporting utility developed by the Microsoft TDSP team can be used to generate reports, which provide content for most of the sections in this model report.
## Analytic Approach
* What is target definition
* What are inputs (description)
* What kind of model was built?
## Model Description
* Models and Parameters
* Description or images of data flow graph
* if AzureML, link to:
* Training experiment
* Scoring workflow
* What learner(s) were used?
* Learner hyper-parameters
## Results (Model Performance)
* ROC/Lift charts, AUC, R^2, MAPE as appropriate
* Performance graphs for parameters sweeps if applicable
## Model Understanding
* Variable Importance (significance)
* Insight Derived from the Model
## Conclusion and Discussions for Next Steps
* Conclusion
* Discussion on overfitting (if applicable)
* What other Features Can Be Generated from the Current Data
* What other Relevant Data Sources Are Available to Help the Modeling

Просмотреть файл

@ -1 +0,0 @@
# Folder for hosting all documents and reports related to modeling

Просмотреть файл

@ -1,54 +0,0 @@
# Project Charter
## Business background
* Who is the client, what business domain the client is in.
* What business problems are we trying to address?
## Scope
* What data science solutions are we trying to build?
* What will we do?
* How is it going to be consumed by the customer?
## Personnel
* Who are on this project:
* Microsoft:
* Project lead
* PM
* Data scientist(s)
* Account manager
* Client:
* Data administrator
* Business contact
## Metrics
* What are the qualitative objectives? (e.g. reduce user churn)
* What is a quantifiable metric (e.g. reduce the fraction of users with 4-week inactivity)
* Quantify what improvement in the values of the metrics are useful for the customer scenario (e.g. reduce the fraction of users with 4-week inactivity by 20%)
* What is the baseline (current) value of the metric? (e.g. current fraction of users with 4-week inactivity = 60%)
* How will we measure the metric? (e.g. A/B test on a specified subset for a specified period; or comparison of performance after implementation to baseline)
## Plan
* Phases (milestones), timeline, short description of what we'll do in each phase.
## Architecture
* Data
* What data do we expect? Raw data in the customer data sources (e.g. on-prem files, SQL, on-prem Hadoop etc.)
* Data movement from on-prem to Azure using ADF or other data movement tools (Azcopy, EventHub etc.) to move either
* all the data,
* after some pre-aggregation on-prem,
* Sampled data enough for modeling
* What tools and data storage/analytics resources will be used in the solution e.g.,
* ASA for stream aggregation
* HDI/Hive/R/Python for feature construction, aggregation and sampling
* AzureML for modeling and web service operationalization
* How will the score or operationalized web service(s) (RRS and/or BES) be consumed in the business workflow of the customer? If applicable, write down pseudo code for the APIs of the web service calls.
* How will the customer use the model results to make decisions
* Data movement pipeline in production
* Make a 1 slide diagram showing the end to end data flow and decision architecture
* If there is a substantial change in the customer's business workflow, make a before/after diagram showing the data flow.
## Communication
* How will we keep in touch? Weekly meetings?
* Who are the contact persons on both sides?

Просмотреть файл

@ -1,64 +0,0 @@
# Exit Report of Project <X> for Customer <Y>
Instructions: Template for exit criteria for data science projects. This is a concise document that includes an overview of the entire project, including details of each stage and learnings. If a section isn't applicable (e.g. the project didn't include an ML model), simply mark that section as "Not applicable". Suggested length is between 5-20 pages. Code should mostly live in the code repository (not in this document).
Customer: <Enter Customer Name\>
Team Members: <Enter team members' names. Please also enter relevant parties' names, such as team lead, account team, business stakeholders, etc.\>
## Overview
<Executive summary of entire solution, brief non-technical overview\>
## Business Domain
<Industry, business domain of customer\>
## Business Problem
<Business problem and exact use case(s), why it matters\>
## Data Processing
<Schema of original datasets, how data was processed, final input data schema for model\>
## Modeling, Validation
<Modeling techniques used, validation results, details of how validation conducted\>
## Solution Architecture
<Architecture of the solution, describe clearly whether this was actually implemented or a proposed architecture. Include diagram and relevant details for reproducing similar architecture. Include details of why this architecture was chosen versus other architectures that were considered, if relevant\>
## Benefits
### Company Benefit (internal only. Double check if you want to share this with your customer)
<What did our company gain from this engagement? ROI, revenue, etc\>
### Customer Benefit
<What is the benefit (ROI, savings, productivity gains etc) for the customer? If just a POC, what is the estimated ROI? If exact metrics are not available, why does it have impact for the customer?\>
## Learnings
### Project Execution
<Learnings around the customer engagement process\>
### Data science / Engineering
<Learnings related to data science/engineering, tips/tricks, etc\>
### Domain
<Learnings around the business domain\>
### Product
<Learnings around the products and services utilized in the solution \>
### What's unique about this project, specific challenges
<Specific issues or setup, unique things, specific challenges that had to be addressed during the engagement and how that was accomplished\>
## Links
<Links to published case studies, etc.; Link to git repository where all code sits\>
## Next Steps
<Next steps. These should include milestones for follow-ups and who owns each action. E.g. post-Proof-of-Concept check-in on status on 12/1/2016 by X, monthly check-in meeting by Y, etc.\>
## Appendix
<Other material that seems relevant. Try to keep the non-appendix portion to fewer than 20 pages, but more details can be included in the appendix if needed\>

View file

@ -1,12 +0,0 @@
# Folder for hosting project documents and reports for a Data Science Project
These could be:
1. Project management and planning docs
2. System architecture
3. Information obtained from a business owner or client about the project
4. Docs and presentations prepared to share information about the project
In this folder we have templates for the project charter and exit report.
In addition, if you have access to Microsoft Project or Excel, you may use project templates provided in this [blog](https://blogs.msdn.microsoft.com/buckwoody/2017/10/24/a-data-science-microsoft-project-template-you-can-use-in-your-solutions).

Binary data
Docs/Project/System Architecture.docx

Binary file not shown.

View file

@ -1,115 +0,0 @@
# The script to run.
script: train.py
# The arguments to the script file.
arguments: []
# The name of the compute target to use for this run.
target: local
# Framework to execute inside. Allowed values are "Python", "PySpark", "CNTK", "TensorFlow", and "PyTorch".
framework: PySpark
# Communicator for the given framework. Allowed values are "None", "ParameterServer", "OpenMpi", and "IntelMpi".
communicator: None
# Automatically prepare the run environment as part of the run itself.
autoPrepareEnvironment: true
# Maximum allowed duration for the run.
maxRunDurationSeconds:
# Number of nodes to use for running job.
nodeCount: 1
# Environment details.
environment:
# Environment variables set for the run.
environmentVariables:
EXAMPLE_ENV_VAR: EXAMPLE_VALUE
# Python details
python:
# user_managed_dependencies=True indicates that the environment will be user managed. False indicates that AzureML will manage the user environment.
userManagedDependencies: false
# The python interpreter path
interpreterPath: python
# Path to the conda dependencies file to use for this run. If a project
# contains multiple programs with different sets of dependencies, it may be
# convenient to manage those environments with separate files.
condaDependenciesFile: ../conda_dependencies.yml
# Docker details
docker:
# Set True to perform this run inside a Docker container.
enabled: true
# Base image used for Docker-based runs.
baseImage: mcr.microsoft.com/azureml/base:0.2.0
# Set False if necessary to work around shared volume bugs.
sharedVolumes: true
# Run with NVidia Docker extension to support GPUs.
gpuSupport: false
# Extra arguments to the Docker run command.
arguments: []
# Image registry that contains the base image.
baseImageRegistry:
# DNS name or IP address of the Azure Container Registry (ACR)
address:
# The username for ACR
username:
# The password for ACR
password:
# Spark details
spark:
# List of spark repositories.
repositories:
- https://mmlspark.azureedge.net/maven
packages:
- group: com.microsoft.ml.spark
artifact: mmlspark_2.11
version: '0.12'
precachePackages: true
# Databricks details
databricks:
# List of maven libraries.
mavenLibraries: []
# List of PyPi libraries
pypiLibraries: []
# List of RCran libraries
rcranLibraries: []
# List of JAR libraries
jarLibraries: []
# List of Egg libraries
eggLibraries: []
# History details.
history:
# Enable history tracking -- this allows status, logs, metrics, and outputs
# to be collected for a run.
outputCollection: true
# whether to take snapshots for history.
snapshotProject: true
# Spark configuration details.
spark:
configuration:
spark.app.name: Azure ML Experiment
spark.yarn.maxAppAttempts: 1
# HDI details.
hdi:
# Yarn deploy mode. Options are cluster and client.
yarnDeployMode: cluster
# Tensorflow details.
tensorflow:
# The number of worker tasks.
workerCount: 1
# The number of parameter server tasks.
parameterServerCount: 1
# Mpi details.
mpi:
# When using MPI, number of processes per node.
processCountPerNode: 1
# data reference configuration details
dataReferences: {}
# Project share datastore reference.
sourceDirectoryDataStore:
# AmlCompute details.
amlcompute:
# VM size of the cluster to be created. Allowed values are Azure VM sizes. The list of VM sizes is available at https://docs.microsoft.com/en-us/azure/cloud-services/cloud-services-sizes-specs
vmSize:
# VM priority of the cluster to be created. Allowed values are "dedicated" and "lowpriority".
vmPriority:
# A bool that indicates if the cluster has to be retained after job completion.
retainCluster: false
# Name of the cluster to be created. If not specified, runId will be used as cluster name.
name:
# Maximum number of nodes in the AmlCompute cluster to be created. Minimum number of nodes will always be set to 0.
clusterMaxNodeCount: 1
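
For reference, a hedged sketch (an assumption about typical usage, not part of the configuration file above) of how a run configuration like this can be loaded and submitted with the Azure ML Python SDK; the experiment name and directory paths are placeholders:

```python
from azureml.core import Experiment, ScriptRunConfig, Workspace
from azureml.core.runconfig import RunConfiguration

ws = Workspace.from_config()

# Load a named *.runconfig file from the given project directory (placeholder path and name)
run_config = RunConfiguration.load(path='Code/Modeling', name='local')

# Bind the configuration to the training script and submit it as an experiment run
src = ScriptRunConfig(
    source_directory='Code/Modeling',
    script='train.py',
    run_config=run_config
)
run = Experiment(workspace=ws, name='sample-experiment').submit(src)
run.wait_for_completion(show_output=True)
```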

View file

@ -1,126 +0,0 @@
# The script to run.
script: train.py
# The arguments to the script file.
arguments: []
# The name of the compute target to use for this run.
target: dsvmcluster
# Framework to execute inside. Allowed values are "Python", "PySpark", "CNTK", "TensorFlow", and "PyTorch".
framework: Python
# Communicator for the given framework. Allowed values are "None", "ParameterServer", "OpenMpi", and "IntelMpi".
communicator: None
# Automatically prepare the run environment as part of the run itself.
autoPrepareEnvironment: true
# Maximum allowed duration for the run.
maxRunDurationSeconds:
# Number of nodes to use for running job.
nodeCount: 1
# Environment details.
environment:
# Environment variables set for the run.
environmentVariables:
EXAMPLE_ENV_VAR: EXAMPLE_VALUE
# Python details
python:
# user_managed_dependencies=True indicates that the environment will be user managed. False indicates that AzureML will manage the user environment.
userManagedDependencies: false
# The python interpreter path
interpreterPath: python
# Path to the conda dependencies file to use for this run. If a project
# contains multiple programs with different sets of dependencies, it may be
# convenient to manage those environments with separate files.
condaDependenciesFile: ../conda_dependencies.yml
# Docker details
docker:
# Set True to perform this run inside a Docker container.
enabled: true
# Base image used for Docker-based runs.
baseImage: mcr.microsoft.com/azureml/base:0.2.0
# Set False if necessary to work around shared volume bugs.
sharedVolumes: true
# Run with NVidia Docker extension to support GPUs.
gpuSupport: false
# Extra arguments to the Docker run command.
arguments: []
# Image registry that contains the base image.
baseImageRegistry:
# DNS name or IP address of the Azure Container Registry (ACR)
address:
# The username for ACR
username:
# The password for ACR
password:
# Spark details
spark:
# List of spark repositories.
repositories:
- https://mmlspark.azureedge.net/maven
packages:
- group: com.microsoft.ml.spark
artifact: mmlspark_2.11
version: '0.12'
precachePackages: true
# Databricks details
databricks:
# List of maven libraries.
mavenLibraries: []
# List of PyPi libraries
pypiLibraries: []
# List of RCran libraries
rcranLibraries: []
# List of JAR libraries
jarLibraries: []
# List of Egg libraries
eggLibraries: []
# History details.
history:
# Enable history tracking -- this allows status, logs, metrics, and outputs
# to be collected for a run.
outputCollection: true
# whether to take snapshots for history.
snapshotProject: true
# Spark configuration details.
spark:
configuration:
spark.app.name: Azure ML Experiment
spark.yarn.maxAppAttempts: 1
# HDI details.
hdi:
# Yarn deploy mode. Options are cluster and client.
yarnDeployMode: cluster
# Tensorflow details.
tensorflow:
# The number of worker tasks.
workerCount: 1
# The number of parameter server tasks.
parameterServerCount: 1
# Mpi details.
mpi:
# When using MPI, number of processes per node.
processCountPerNode: 1
# data reference configuration details
dataReferences:
workspaceblobstore:
# Name of the datastore.
dataStoreName: workspaceblobstore
# relative path on the datastore.
pathOnDataStore:
# operation on the datastore, mount, download, upload
mode: mount
# whether to overwrite the data if existing
overwrite: false
# the path on the compute target.
pathOnCompute:
# Project share datastore reference.
sourceDirectoryDataStore:
# AmlCompute details.
amlcompute:
# VM size of the cluster to be created. Allowed values are Azure VM sizes. The list of VM sizes is available at https://docs.microsoft.com/en-us/azure/cloud-services/cloud-services-sizes-specs
vmSize:
# VM priority of the cluster to be created. Allowed values are "dedicated" and "lowpriority".
vmPriority:
# A bool that indicates if the cluster has to be retained after job completion.
retainCluster: false
# Name of the cluster to be created. If not specified, runId will be used as cluster name.
name:
# Maximum number of nodes in the AmlCompute cluster to be created. Minimum number of nodes will always be set to 0.
clusterMaxNodeCount: 1

View file

@ -1,126 +0,0 @@
# The script to run.
script: train.py
# The arguments to the script file.
arguments: []
# The name of the compute target to use for this run.
target: local
# Framework to execute inside. Allowed values are "Python", "PySpark", "CNTK", "TensorFlow", and "PyTorch".
framework: Python
# Communicator for the given framework. Allowed values are "None", "ParameterServer", "OpenMpi", and "IntelMpi".
communicator: None
# Automatically prepare the run environment as part of the run itself.
autoPrepareEnvironment: true
# Maximum allowed duration for the run.
maxRunDurationSeconds:
# Number of nodes to use for running job.
nodeCount: 1
# Environment details.
environment:
# Environment variables set for the run.
environmentVariables:
EXAMPLE_ENV_VAR: EXAMPLE_VALUE
# Python details
python:
# user_managed_dependencies=True indicates that the environment will be user managed. False indicates that AzureML will manage the user environment.
userManagedDependencies: false
# The python interpreter path
interpreterPath: python
# Path to the conda dependencies file to use for this run. If a project
# contains multiple programs with different sets of dependencies, it may be
# convenient to manage those environments with separate files.
condaDependenciesFile: ../conda_dependencies.yml
# Docker details
docker:
# Set True to perform this run inside a Docker container.
enabled: false
# Base image used for Docker-based runs.
baseImage: mcr.microsoft.com/azureml/base:0.2.0
# Set False if necessary to work around shared volume bugs.
sharedVolumes: true
# Run with NVidia Docker extension to support GPUs.
gpuSupport: false
# Extra arguments to the Docker run command.
arguments: []
# Image registry that contains the base image.
baseImageRegistry:
# DNS name or IP address of the Azure Container Registry (ACR)
address:
# The username for ACR
username:
# The password for ACR
password:
# Spark details
spark:
# List of spark repositories.
repositories:
- https://mmlspark.azureedge.net/maven
packages:
- group: com.microsoft.ml.spark
artifact: mmlspark_2.11
version: '0.12'
precachePackages: true
# Databricks details
databricks:
# List of maven libraries.
mavenLibraries: []
# List of PyPi libraries
pypiLibraries: []
# List of RCran libraries
rcranLibraries: []
# List of JAR libraries
jarLibraries: []
# List of Egg libraries
eggLibraries: []
# History details.
history:
# Enable history tracking -- this allows status, logs, metrics, and outputs
# to be collected for a run.
outputCollection: true
# whether to take snapshots for history.
snapshotProject: true
# Spark configuration details.
spark:
configuration:
spark.app.name: Azure ML Experiment
spark.yarn.maxAppAttempts: 1
# HDI details.
hdi:
# Yarn deploy mode. Options are cluster and client.
yarnDeployMode: cluster
# Tensorflow details.
tensorflow:
# The number of worker tasks.
workerCount: 1
# The number of parameter server tasks.
parameterServerCount: 1
# Mpi details.
mpi:
# When using MPI, number of processes per node.
processCountPerNode: 1
# data reference configuration details
dataReferences:
workspaceblobstore:
# Name of the datastore.
dataStoreName: workspaceblobstore
# relative path on the datastore.
pathOnDataStore:
# operation on the datastore, mount, download, upload
mode: download
# whether to overwrite the data if existing
overwrite: false
# the path on the compute target.
pathOnCompute:
# Project share datastore reference.
sourceDirectoryDataStore:
# AmlCompute details.
amlcompute:
# VM size of the cluster to be created. Allowed values are Azure VM sizes. The list of VM sizes is available at https://docs.microsoft.com/en-us/azure/cloud-services/cloud-services-sizes-specs
vmSize:
# VM priority of the cluster to be created. Allowed values are "dedicated" and "lowpriority".
vmPriority:
# A bool that indicates if the cluster has to be retained after job completion.
retainCluster: false
# Name of the cluster to be created. If not specified, runId will be used as cluster name.
name:
# Maximum number of nodes in the AmlCompute cluster to be created. Minimum number of nodes will always be set to 0.
clusterMaxNodeCount: 1

View file

@ -9,8 +9,8 @@ dependencies:
- matplotlib==3.0.2
- utils==0.9.0
# Required packages for AzureML execution, history, and data preparation.
- azureml-sdk==1.0.10
- azureml-defaults==1.0.10
- azureml-sdk==1.0.85
- azureml-defaults==1.0.85
- azure-cli==2.0.58
# Dev Tools
- setuptools

View file

@ -0,0 +1,25 @@
import os
from azureml.core import Workspace
from azureml.core.authentication import AzureCliAuthentication
# Create the workspace using the specified parameters
ws = Workspace.create(
name='azuremlworkshopws',
subscription_id='<subscription>',
resource_group='azuremlworkshoprgp',
location='westeurope',
create_resource_group=True,
sku='basic',
exist_ok=True,
auth=AzureCliAuthentication()
)
print(ws.get_details())
# write the details of the workspace to a configuration file in the project root
ws.write_config(
path=os.path.join(
os.path.dirname(os.path.realpath(__file__)),
'../'
)
)
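# Usage note (an assumption, not part of the original script): after this script
# has been run once, other scripts in the repository can attach to the same
# workspace by reading the generated configuration file, e.g.:
#
#     from azureml.core import Workspace
#     ws = Workspace.from_config()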

43
labs/01_setup.md Normal file
View file

@ -0,0 +1,43 @@
# Lab 1: setting up the environment
In this first lab, we'll set up our working environment.
## Requirements
* Visual Studio Code
Download and install [Visual Studio Code](https://code.visualstudio.com/)
* Miniconda
Download and install [Miniconda](https://docs.conda.io/en/latest/miniconda.html)
* Azure ML SDK
From a command line window, run the following command to install the Python client package for Azure ML: `pip install azureml-sdk`
* Azure CLI
From a command line window, run the following command to install the Azure CLI, used for authentication and management tasks: `pip install azure-cli`
* A git client to clone the lab content
For example, Git SCM: https://git-scm.com/.
## Clone the repository
Clone the following git repository: `git clone https://github.com/Azure/MLOps-TDSP-Template`
## Open the cloned git repository in VS Code or your favorite IDE
## Az Login
From a terminal, log in to your Azure subscription using the Azure CLI.
* `az login`
If you have multiple subscriptions, you might want to set the right subscription by using the following command.
* `az account set -s <subid>`
## Deploy an ML workspace and dependent resources
Execute the script `infrastructure/create_mlworkspace.py` to deploy the ML workspace and dependent resources such as a Key Vault instance and a storage account.
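For example, from the repository root this could be run as `python infrastructure/create_mlworkspace.py`, using the Azure CLI login from the previous step for authentication.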
## Browse through the created resources in the portal
You can now take a look at the created resources via the [Azure Portal](http://portal.azure.com/).

Some files were not shown because too many files changed in this diff.