Removed deprecated sample extension

This commit is contained in:
Jeff Shepherd 2022-08-19 16:30:49 -07:00
Родитель b197129aa4
Коммит d88d4df6a6
20 изменённых файлов: 0 добавлений и 1872 удалений

Просмотреть файл

@ -1,74 +0,0 @@
#!/usr/bin/python
import sys
import Adafruit_DHT
import random
import time
import iothub_client
# pylint: disable=E0611
from iothub_client import IoTHubClient, IoTHubClientError, IoTHubTransportProvider, IoTHubClientResult
from iothub_client import IoTHubMessage, IoTHubMessageDispositionResult, IoTHubError, DeviceMethodReturnValue
# Device connection string used to authenticate with IoT Hub.
# NOTE(review): placeholder value — replace with a real device connection string.
CONNECTION_STRING = "YourConnectionString"
# Using the MQTT protocol.
PROTOCOL = IoTHubTransportProvider.MQTT
# Maximum time in milliseconds before a queued message times out.
MESSAGE_TIMEOUT = 10000
# Define the JSON message to send to IoT Hub.
TEMPERATURE = 20.0
HUMIDITY = 60
# %-format template that produces the JSON telemetry payload.
MSG_TXT = "{\"temperature\": %.2f,\"humidity\": %.2f}"
# Maps the CLI sensor-type argument ('11', '22', '2302') to the
# corresponding Adafruit_DHT sensor constant.
sensor_args = { '11': Adafruit_DHT.DHT11,
                '22': Adafruit_DHT.DHT22,
                '2302': Adafruit_DHT.AM2302 }
def send_confirmation_callback(message, result, user_context):
    """Log the delivery status IoT Hub reported for a previously sent message."""
    status_line = "IoT Hub responded to message with status: %s" % result
    print(status_line)
def iothub_client_init():
    """Create and return an IoT Hub device client for the configured
    connection string and transport protocol."""
    return IoTHubClient(CONNECTION_STRING, PROTOCOL)
def get_sensor_data(sensor, pin):
    """Read (humidity, temperature) from the DHT sensor on the given pin,
    retrying via the library's built-in retry loop."""
    reading = Adafruit_DHT.read_retry(sensor, pin)
    return reading
def data_loop(sensor, pin):
    """Read the DHT sensor once per second and forward each sample to IoT Hub.

    Runs until Ctrl-C (KeyboardInterrupt) or an IoTHubError occurs.

    :param sensor: Adafruit_DHT sensor constant (DHT11/DHT22/AM2302).
    :param pin: GPIO pin (BCM numbering) the sensor's data line is wired to.
    """
    try:
        client = iothub_client_init()
        print("IoT Hub device sending periodic messages, press Ctrl-C to exit")
        count = 0
        while True:
            humidity, temperature = get_sensor_data(sensor, pin)
            # read_retry() returns (None, None) when every retry fails;
            # skip the sample instead of crashing on "%.2f" % None below.
            if humidity is None or temperature is None:
                print("Sensor read failed, retrying...")
                time.sleep(1)
                continue
            msg_txt_formatted = MSG_TXT % (temperature, humidity)
            message = IoTHubMessage(msg_txt_formatted)
            # Optional due to edgeHub error
            message.message_id = "message_%d" % count
            message.correlation_id = "correlation_%d" % count
            count += 1
            # Add a custom application property: an IoT hub can route/filter
            # on these without access to the message body.
            prop_map = message.properties()
            if temperature > 30:
                prop_map.add("temperatureAlert", "true")
            else:
                prop_map.add("temperatureAlert", "false")
            # Send the message.
            print("Sending message: %s" % message.get_string())
            client.send_event_async(message, send_confirmation_callback, None)
            time.sleep(1)
    except IoTHubError as iothub_error:
        print("Unexpected error %s from IoTHub" % iothub_error)
        return
    except KeyboardInterrupt:
        print("IoTHubClient sample stopped")
if __name__ == '__main__':
    print("Sensor data")
    print("Press Ctrl-C to exit")
    # Validate CLI arguments up front instead of raising a bare
    # IndexError/KeyError on a missing or unknown sensor type.
    if len(sys.argv) != 3 or sys.argv[1] not in sensor_args:
        print("Usage: %s [11|22|2302] <GPIO pin number>" % sys.argv[0])
        sys.exit(1)
    sensor = sensor_args[sys.argv[1]]
    pin = sys.argv[2]
    data_loop(sensor, pin)

Просмотреть файл

@ -1,76 +0,0 @@
# Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE file in the project root for full license information.
import random
import time
import sys
# Using the Python Device SDK for IoT Hub:
# https://github.com/Azure/azure-iot-sdk-python
# The sample connects to a device-specific MQTT endpoint on your IoT Hub.
import iothub_client
# pylint: disable=E0611
from iothub_client import IoTHubClient, IoTHubClientError, IoTHubTransportProvider, IoTHubClientResult
from iothub_client import IoTHubMessage, IoTHubMessageDispositionResult, IoTHubError, DeviceMethodReturnValue
# The device connection string to authenticate the device with your IoT hub.
# Using the Azure CLI:
# az iot hub device-identity show-connection-string --hub-name {YourIoTHubName} --device-id MyNodeDevice --output table
# NOTE(review): placeholder value — replace with a real device connection string.
CONNECTION_STRING = "YourConnectionString"
# Using the MQTT protocol.
PROTOCOL = IoTHubTransportProvider.MQTT
# Maximum time in milliseconds before a queued message times out.
MESSAGE_TIMEOUT = 10000
# Define the JSON message to send to IoT Hub.
# Baseline values the simulated telemetry jitters around.
TEMPERATURE = 20.0
HUMIDITY = 60
# %-format template that produces the JSON telemetry payload.
MSG_TXT = "{\"temperature\": %.2f,\"humidity\": %.2f}"
def send_confirmation_callback(message, result, user_context):
    """Report the delivery status IoT Hub returned for a sent message."""
    print("IoT Hub responded to message with status: %s" % (result,))
def iothub_client_init():
    """Build an IoT Hub device client from the module-level settings."""
    # Create an IoT Hub client
    return IoTHubClient(CONNECTION_STRING, PROTOCOL)
def iothub_client_telemetry_sample_run():
    """Send simulated temperature/humidity telemetry to IoT Hub once per
    second until Ctrl-C or an IoTHubError stops the loop."""
    try:
        client = iothub_client_init()
        print("IoT Hub device sending periodic messages, press Ctrl-C to exit")
        count = 0
        while True:
            # Simulated telemetry: random jitter around the baseline constants.
            temperature = TEMPERATURE + (random.random() * 15)
            humidity = HUMIDITY + (random.random() * 20)
            message = IoTHubMessage(MSG_TXT % (temperature, humidity))
            ### Optional
            message.message_id = "message_%d" % count
            message.correlation_id = "correlation_%d" % count
            # An IoT hub can filter on application properties without
            # access to the message body.
            props = message.properties()
            alert = "true" if temperature > 30 else "false"
            props.add("temperatureAlert", alert)
            # Send the message.
            print("Sending message: %s" % message.get_string())
            client.send_event_async(message, send_confirmation_callback, None)
            time.sleep(1)
            count += 1
    except IoTHubError as iothub_error:
        print("Unexpected error %s from IoTHub" % iothub_error)
        return
    except KeyboardInterrupt:
        print("IoTHubClient sample stopped")
if __name__ == '__main__':
    # Entry point: print a banner, then run the telemetry loop until Ctrl-C.
    print ( "IoT Hub Quickstart #1 - Simulated device" )
    print ( "Press Ctrl-C to exit" )
    iothub_client_telemetry_sample_run()

Просмотреть файл

@ -1,20 +0,0 @@
# Extractor-module image: Python 2 + the native deps for the iothub_client wheel.
# NOTE(review): ubuntu:xenial and python-pip are end-of-life — confirm whether a
# maintained base image is acceptable before reusing this Dockerfile.
FROM ubuntu:xenial
WORKDIR /app
# Install native build/runtime dependencies, then clear the apt cache to keep
# the layer small.
RUN apt-get update && \
apt-get install -y --no-install-recommends libcurl4-openssl-dev python-pip libboost-python-dev && \
rm -rf /var/lib/apt/lists/*
# Install Python dependencies first so source-only edits don't invalidate this layer.
COPY requirements.txt ./
RUN pip install -r requirements.txt
COPY . .
# Pre-create the data file on a named volume so the non-root module user can
# append telemetry and the host can read it.
# NOTE(review): chmod 777 is overly permissive — consider chown to moduleuser.
RUN mkdir /myvol
RUN touch /myvol/data.json
RUN chmod 777 /myvol/data.json
VOLUME /myvol
# Drop root privileges before running the module.
RUN useradd -ms /bin/bash moduleuser
USER moduleuser
CMD [ "python", "-u", "./main.py" ]

Просмотреть файл

@ -1,98 +0,0 @@
# Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE file in the project root for
# full license information.
import json
import random
import time
import sys
import iothub_client
from iothub_client import IoTHubModuleClient, IoTHubClientError, IoTHubTransportProvider
from iothub_client import IoTHubMessage, IoTHubMessageDispositionResult, IoTHubError
import os
# messageTimeout - the maximum time in milliseconds until a message times out.
# The timeout period starts at IoTHubModuleClient.send_event_async.
# By default, messages do not expire.
MESSAGE_TIMEOUT = 10000
# global counters
RECEIVE_CALLBACKS = 0  # messages received on "input1" so far
SEND_CALLBACKS = 0  # delivery confirmations received so far
TEMPERATURE_THRESHOLD = 25  # NOTE(review): appears unused in this module — confirm
TWIN_CALLBACKS = 0  # NOTE(review): appears unused in this module — confirm
# Choose HTTP, AMQP or MQTT as transport protocol. Currently only MQTT is supported.
PROTOCOL = IoTHubTransportProvider.MQTT
# Callback received when the message that we're forwarding is processed.
def send_confirmation_callback(message, result, user_context):
    """Count and log each delivery confirmation for a forwarded message."""
    global SEND_CALLBACKS
    print("Confirmation[%d] received for message with result = %s" % (user_context, result))
    print(" Properties: %s" % message.properties().get_internals())
    SEND_CALLBACKS += 1
    print(" Total calls confirmed: %d" % SEND_CALLBACKS)
# Invoked when an incoming message arrives on the "input1" queue; decodes the
# JSON payload, appends it to the shared volume, and accepts the message.
def receive_message_callback(message, hubManager):
    """Persist one incoming telemetry message as a JSON line in /myvol/data.json."""
    global RECEIVE_CALLBACKS
    global TEMPERATURE_THRESHOLD
    payload = message.get_bytearray()
    message_text = payload.decode('utf-8')
    print(" Data: <<<%s>>> & Size=%d" % (message_text, len(payload)))
    print(" Properties: %s" % message.properties().get_internals())
    RECEIVE_CALLBACKS += 1
    print(" Total calls received: %d" % RECEIVE_CALLBACKS)
    # Round-trip through json to normalize the payload before writing one
    # record per line.
    data = json.loads(message_text)
    with open("/myvol/data.json", "a") as myfile:
        myfile.write(json.dumps(data))
        myfile.write('\n')
    return IoTHubMessageDispositionResult.ACCEPTED
class HubManager(object):
    """Wraps the IoT Edge module client: wires up the inbound message
    callback and forwards events to named module outputs."""

    def __init__(
            self,
            protocol=IoTHubTransportProvider.MQTT):
        # Transport used to talk to edgeHub (only MQTT is supported).
        self.client_protocol = protocol
        self.client = IoTHubModuleClient()
        # Connect using the environment variables the Edge runtime injects.
        self.client.create_from_environment(protocol)
        # set the time until a message times out
        self.client.set_option("messageTimeout", MESSAGE_TIMEOUT)
        # sets the callback when a message arrives on "input1" queue. Messages sent to
        # other inputs or to the default will be silently discarded.
        self.client.set_message_callback("input1", receive_message_callback, self)

    # Forwards the message received onto the next stage in the process.
    def forward_event_to_output(self, outputQueueName, event, send_context):
        self.client.send_event_async(
            outputQueueName, event, send_confirmation_callback, send_context)
def main(protocol):
    """Start the HubManager and idle until interrupted or a hub error occurs."""
    try:
        print("\nPython %s\n" % sys.version)
        print("EdgeHub Extractor for Python")
        manager = HubManager(protocol)
        print("Starting the Extractor using protocol %s..." % manager.client_protocol)
        # The module is callback-driven; just keep the process alive.
        while True:
            time.sleep(1)
    except IoTHubError as iothub_error:
        print("Unexpected error %s from IoTHub" % iothub_error)
        return
    except KeyboardInterrupt:
        print("Extractor stopped")
if __name__ == '__main__':
    # Run the extractor with the module-level transport (MQTT).
    main(PROTOCOL)

Просмотреть файл

@ -1 +0,0 @@
azure-iothub-device-client==1.4.2

Двоичные данные
Extensions/IoT/Img/AddModule.PNG

Двоичный файл не отображается.

До

Ширина:  |  Высота:  |  Размер: 28 KiB

Двоичные данные
Extensions/IoT/Img/Architecture.PNG

Двоичный файл не отображается.

До

Ширина:  |  Высота:  |  Размер: 144 KiB

Двоичные данные
Extensions/IoT/Img/EdgeDevice.PNG

Двоичный файл не отображается.

До

Ширина:  |  Высота:  |  Размер: 41 KiB

Двоичные данные
Extensions/IoT/Img/Routes.PNG

Двоичный файл не отображается.

До

Ширина:  |  Высота:  |  Размер: 30 KiB

Двоичные данные
Extensions/IoT/Img/SetModules.PNG

Двоичный файл не отображается.

До

Ширина:  |  Высота:  |  Размер: 50 KiB

Просмотреть файл

@ -1,342 +0,0 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Azure services for Iot\n",
"\n",
"## Summary \n",
"\n",
"An introduction to the Azure services involved in Iot Development and their respective deployment using the Azure-Python SDK\n",
"\n",
"\n",
"## Description\n",
"\n",
"The purpose of this notebook is to give an introduction to the different Azure services that are involved in Iot Solutions, and to deploy them programmatically using python and the Azure-SDK.\n",
"\n",
"## Requirements\n",
"\n",
"* None\n",
"\n",
"## Documentation\n",
"\n",
"* Resource Groups: https://docs.microsoft.com/en-us/python/api/overview/azure/resources?view=azure-python\n",
"* Iot Hub: https://docs.microsoft.com/en-us/python/api/overview/azure/iot?view=azure-python\n",
"* CosmosDB: https://docs.microsoft.com/en-us/python/api/overview/azure/cosmosdb?view=azure-python\n",
"* Data Lake Store: https://docs.microsoft.com/en-us/python/api/overview/azure/data-lake-store?view=azure-python\n",
"* Data Lake Analytics: https://docs.microsoft.com/en-us/python/api/overview/azure/data-lake-analytics?view=azure-python\n",
"\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"bash: line 2: /usr/bin/az: No such file or directory\n"
]
}
],
"source": [
"%%bash\n",
"\n",
"/usr/bin/az login"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"from azure.common.client_factory import get_client_from_cli_profile\n",
"subscription_id = 'your-subscription-id' ## you can find this in the portal\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Resource group\n",
"\n",
"First of all we need to set up a resource group in order to store our services.\n",
"\n",
"Resource groups provide a way to monitor, control access, provision and manage billing for collections of assets that are required to run an application, or used by a client or company department."
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Creating Resource Group...\n",
"Resource Group done\n"
]
}
],
"source": [
"from azure.mgmt.resource import ResourceManagementClient\n",
"\n",
"resource_group = 'resource-group-name' ## Resource group naming \n",
"location = 'centralus' ## 'eastus2,northeurope,centralus,westeurope', etc..\n",
"\n",
"resource_client = get_client_from_cli_profile(ResourceManagementClient)\n",
"print('Creating Resource Group...')\n",
"resource_client.resource_groups.create_or_update(resource_group, {'location': location})\n",
"print('Resource Group done')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Iot Hub\n",
"Azure IoT Hub is a fully managed service that enables reliable and secure bidirectional communications between millions of IoT devices and a solution back end. \n",
"\n",
"Provides multiple device-to-cloud and cloud-to-device communication options. \n",
"These options include:\n",
"* one-way messaging, file transfer, and request-reply methods.\n",
"* Provides built-in declarative message routing to other Azure services.\n",
"* Provides a queryable store for device metadata and synchronized state information.\n",
"* Enables secure communications and access control using per-device security keys or X.509 certificates.\n",
"* Provides extensive monitoring for device connectivity and device identity management events.\n",
"* Includes device libraries for the most popular languages and platforms.\n",
" \n",
"IoTHub: Connect, monitor, and manage billions of IoT assets\n",
"* Establish bi-directional communication with billions of IoT devices\n",
"* Authenticate per device for security-enhanced IoT solutions\n",
"* Register devices at scale with IoT Hub Device Provisioning Service\n",
"* Manage your IoT devices at scale with device management\n",
"* Extend the power of the cloud to your edge device\n",
"\n",
"In the next lines we create an Iot hub with default characteristics and a basic SKU."
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Creating Iot Hub...\n",
"Iot Hub Done.\n"
]
}
],
"source": [
"from azure.mgmt.iothub import IotHubClient\n",
"\n",
"iot_hub_client = get_client_from_cli_profile(IotHubClient)\n",
"resouce_name = 'iot-hub-name' \n",
"location = 'centralus'\n",
"\n",
"print('Creating Iot Hub...')\n",
"iot_hub_client.iot_hub_resource.create_or_update(\n",
" resource_group,\n",
" resouce_name,\n",
" {\n",
" 'location': location,\n",
" 'subscriptionid': subscription_id,\n",
" 'resourcegroup': resource_group,\n",
" 'sku': {\n",
" 'name': 'S1',\n",
" 'capacity': 2\n",
" },\n",
" 'properties': {\n",
" 'enable_file_upload_notifications': False,\n",
" 'operations_monitoring_properties': {\n",
" 'events': {\n",
" \"C2DCommands\": \"Error\",\n",
" \"DeviceTelemetry\": \"Error\",\n",
" \"DeviceIdentityOperations\": \"Error\",\n",
" \"Connections\": \"Information\"\n",
" }\n",
" },\n",
" \"features\": \"None\",\n",
" }\n",
" }\n",
").result()\n",
"\n",
"print('Iot Hub Done.')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Data Lake Store\n",
"\n",
"Azure Data Lake Store is an enterprise-wide hyper-scale repository for big data analytic workloads. Azure Data Lake enables you to capture data of any size, type, and ingestion speed in one single place for operational and exploratory analytics. Data Lake Store can store trillions of files. A single file can be larger than one petabyte in size. This makes Data Lake Store ideal for storing any type of data including massive datasets like high-resolution video, genomic and seismic datasets, medical data, and data from a wide variety of industries."
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Creating Data Lake Store...\n",
"Data Lake Store Done.\n"
]
}
],
"source": [
"from azure.mgmt.datalake.store import DataLakeStoreAccountManagementClient\n",
"\n",
"data_lake_store_client = get_client_from_cli_profile(DataLakeStoreAccountManagementClient)\n",
"data_lake_store_account = \"Data-Lake-Store-Name\"\n",
"location = 'centralus'\n",
"\n",
"print('Creating Data Lake Store...')\n",
"data_lake_store_client.account.create(\n",
" resource_group,\n",
" data_lake_store_account,\n",
" {\n",
" 'location': location\n",
" },\n",
" None,\n",
" False\n",
").result()\n",
"print('Data Lake Store Done.')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Data Lake Analytics\n",
"\n",
"Azure Data Lake Analytics allow us to process big data jobs in seconds. There is no infrastructure to worry about because there are no servers, virtual machines, or clusters to wait for, manage, or tune. We can instantly scale the processing power, measured in Azure Data Lake Analytics Units (AU), from one to thousands for each job. "
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Creating Data Lake Analytics...\n",
"Data Lake Analytics Done.\n"
]
}
],
"source": [
"from azure.mgmt.datalake.analytics.account import DataLakeAnalyticsAccountManagementClient\n",
"from azure.mgmt.datalake.analytics.account.models import DataLakeAnalyticsAccount, DataLakeStoreAccountInfo\n",
"\n",
"\n",
"resource_name = \"Data-Lake-Analytics-Name\"\n",
"location = 'centralus'\n",
"data_lake_analytics_client = get_client_from_cli_profile(DataLakeAnalyticsAccountManagementClient)\n",
"\n",
"print('Creating Data Lake Analytics...')\n",
"data_lake_analytics_client.account.create(\n",
" resource_group,\n",
" resource_name,\n",
" DataLakeAnalyticsAccount(\n",
" location=location,\n",
" default_data_lake_store_account=data_lake_store_account,\n",
" data_lake_store_accounts=[DataLakeStoreAccountInfo(name=data_lake_store_account)]\n",
" )\n",
").result()\n",
"print('Data Lake Analytics Done.')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Cosmos DB\n",
"\n",
"Cosmos DB is Microsoft's globally distributed, multi-model database. It supports multiple data models, including but not limited to document, graph, key-value, table, and column-family data models. CosmosDB is able to elastically and independently scale throughput and storage on demand and worldwide\n",
"\n",
"CosmosDB provides a solution for doing hot path analytics of our Iot generated data.\n"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Creating CosmosDB...\n",
"Creating CosmosDB...\n"
]
}
],
"source": [
"from azure.mgmt.cosmosdb.cosmos_db import CosmosDB\n",
"from azure.mgmt.cosmosdb.models import Location\n",
"resource_name = \"CosmosDB-Name\"\n",
"location = 'centralus'\n",
"## As stated before cosmosDB is globally distributed, therefore we can specify\n",
"## in which regions is it going to be deployed\n",
"locations = [Location( location_name= 'centralus', failover_priority = 1), Location(location_name= 'eastus', failover_priority = 0)]\n",
"cosmosdb_client = get_client_from_cli_profile(CosmosDB)\n",
"\n",
"\n",
"print('Creating CosmosDB...')\n",
"cosmosdb_result = cosmosdb_client.database_accounts.create_or_update(\n",
" resource_group,\n",
" resource_name,\n",
" {\n",
" 'location' : location,\n",
" 'locations' : locations,\n",
" 'kind': 'MongoDB',\n",
" } \n",
").result()\n",
"print('CosmosDB Done.')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Now if you go into the Azure Portal your services are going to be set up in the resource group that we created in the beginning "
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.5.4"
}
},
"nbformat": 4,
"nbformat_minor": 2
}

Просмотреть файл

@ -1,335 +0,0 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Obtaining IoT data \n",
"\n",
"## Summary \n",
"\n",
"This notebook explains how to obtain telemetry data coming from IoT devices that arrives through a gateway enabled edgeHub.\n",
"\n",
"## Description\n",
"\n",
"The purpose of this notebook is to explain and guide the reader onto how to obtain telemetry data generated from IoT devices within the DSVM IoT extension. In order to achieve this, we propose the following architecture.\n",
"\n",
"![Architecture](https://raw.githubusercontent.com/Azure/DataScienceVM/master/Extensions/IoT/Img/Architecture.PNG)\n",
"\n",
"\n",
"An IoT device sends telemetry data into a gateway enabled EdgeHub, then the route is split: the data goes upstream to IoT Hub and it also goes into an 'Extractor Module' whose only job is to read and dump incoming telemetry data into the host.\n",
"\n",
"\n",
"## Requirements\n",
"\n",
"* An Iot Hub\n",
"* A gateway enabled Iot Edge runtime, please refer to the 'Setting up your Edge device' notebook.\n",
"\n",
"\n",
"## Documentation\n",
"\n",
"* Iot Edge modules: https://docs.microsoft.com/en-us/azure/iot-edge/module-composition\n",
"\n",
"\n",
"\n",
"\n",
"\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Step 1: Building the 'Extractor Module'\n",
"\n",
"In order to read incoming telemetry data we need to route that data into a module that can unpack the data and store it locally on the host, the following code shows what's running inside the container:\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import json\n",
"import random\n",
"import time\n",
"import sys\n",
"import iothub_client\n",
"from iothub_client import IoTHubModuleClient, IoTHubClientError, IoTHubTransportProvider\n",
"from iothub_client import IoTHubMessage, IoTHubMessageDispositionResult, IoTHubError\n",
"import os\n",
"\n",
"# messageTimeout - the maximum time in milliseconds until a message times out.\n",
"# The timeout period starts at IoTHubModuleClient.send_event_async.\n",
"# By default, messages do not expire.\n",
"MESSAGE_TIMEOUT = 10000\n",
"# global counters\n",
"RECEIVE_CALLBACKS = 0\n",
"SEND_CALLBACKS = 0\n",
"# Choose HTTP, AMQP or MQTT as transport protocol. Currently only MQTT is supported.\n",
"PROTOCOL = IoTHubTransportProvider.MQTT\n",
"\n",
"# Callback received when the message that we're forwarding is processed.\n",
"def send_confirmation_callback(message, result, user_context):\n",
" global SEND_CALLBACKS\n",
" print ( \"Confirmation[%d] received for message with result = %s\" % (user_context, result) )\n",
" map_properties = message.properties()\n",
" key_value_pair = map_properties.get_internals()\n",
" print ( \" Properties: %s\" % key_value_pair )\n",
" SEND_CALLBACKS += 1\n",
" print ( \" Total calls confirmed: %d\" % SEND_CALLBACKS )\n",
"\n",
"\n",
"# receive_message_callback is invoked when an incoming message arrives on the specified \n",
"# input queue (in the case of this sample, \"input1\"). Because this is a filter module, \n",
"# we forward this message to the \"output1\" queue.\n",
"def receive_message_callback(message, hubManager):\n",
" global RECEIVE_CALLBACKS\n",
" message_buffer = message.get_bytearray()\n",
" size = len(message_buffer)\n",
" message_text = message_buffer[:size].decode('utf-8')\n",
" print ( \" Data: <<<%s>>> & Size=%d\" % (message_text, size) )\n",
" map_properties = message.properties()\n",
" key_value_pair = map_properties.get_internals()\n",
" print ( \" Properties: %s\" % key_value_pair )\n",
" RECEIVE_CALLBACKS += 1\n",
" print ( \" Total calls received: %d\" % RECEIVE_CALLBACKS )\n",
" data = json.loads(message_text)\n",
" with open(\"/myvol/data.json\", \"a\") as myfile:\n",
" myfile.write(json.dumps(data))\n",
" myfile.write('\\n')\n",
" return IoTHubMessageDispositionResult.ACCEPTED\n",
"\n",
"\n",
"class HubManager(object):\n",
"\n",
" def __init__(\n",
" self,\n",
" protocol=IoTHubTransportProvider.MQTT):\n",
" self.client_protocol = protocol\n",
" self.client = IoTHubModuleClient()\n",
" self.client.create_from_environment(protocol)\n",
"\n",
" # set the time until a message times out\n",
" self.client.set_option(\"messageTimeout\", MESSAGE_TIMEOUT)\n",
" \n",
" # sets the callback when a message arrives on \"input1\" queue. Messages sent to \n",
" # other inputs or to the default will be silently discarded.\n",
" self.client.set_message_callback(\"input1\", receive_message_callback, self)\n",
"\n",
" # Forwards the message received onto the next stage in the process.\n",
" def forward_event_to_output(self, outputQueueName, event, send_context):\n",
" self.client.send_event_async(\n",
" outputQueueName, event, send_confirmation_callback, send_context)\n",
"\n",
"def main(protocol):\n",
" try:\n",
" print ( \"\\nPython %s\\n\" % sys.version )\n",
" print ( \"EdgeHub Extractor for Python\" )\n",
" hub_manager = HubManager(protocol)\n",
" print ( \"Starting the Extractor using protocol %s...\" % hub_manager.client_protocol )\n",
" while True:\n",
" time.sleep(1)\n",
"\n",
" except IoTHubError as iothub_error:\n",
" print ( \"Unexpected error %s from IoTHub\" % iothub_error )\n",
" return\n",
" except KeyboardInterrupt:\n",
" print ( \"Extractor stopped\" )\n",
"\n",
"if __name__ == '__main__':\n",
" main(PROTOCOL)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Next is the docker file used to build the container, notice that the data that arrives to the module it's going to be stored in a docker volume:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"FROM ubuntu:xenial\n",
"\n",
"WORKDIR /app\n",
"\n",
"RUN apt-get update && \\\n",
" apt-get install -y --no-install-recommends libcurl4-openssl-dev python-pip libboost-python-dev && \\\n",
" rm -rf /var/lib/apt/lists/* \n",
"\n",
"COPY requirements.txt ./\n",
"RUN pip install -r requirements.txt\n",
"\n",
"COPY . .\n",
"RUN mkdir /myvol\n",
"RUN touch /myvol/data.json\n",
"RUN chmod 777 /myvol/data.json\n",
"VOLUME /myvol\n",
"RUN useradd -ms /bin/bash moduleuser\n",
"USER moduleuser\n",
"\n",
"CMD [ \"python\", \"-u\", \"./main.py\" ]"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Step 2: Using the module\n",
"\n",
"There are currently two options for using the 'Extractor Module':\n",
"\n",
"### Building it by yourself:\n",
"\n",
"In case that you want to build the container yourself and deploy it to a repository of your own we have included the required files for the module under 'home/$USER/IoT/IotEdge/gateway/extractor_module', the directory includes the before mentioned files:\n",
"\n",
"* main.py\n",
"* Dockerfile\n",
"* Requirements.txt\n",
"\n",
"you can run the command:\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"%%bash\n",
"docker build -t 'name of container' /home/$USER/IoT/IotEdge/gateway/extractor_module"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"in order to create it and publish it in the repo of your choice.\n",
"\n",
"\n",
"### Using the one in docker-hub:\n",
"\n",
"We have already a module created and hosted in docker-hub that's ready to use.\n",
"\n",
"https://hub.docker.com/r/tdavega/extractor_module/\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Step 3: Deployment\n",
"\n",
"Once you have created your module and you have published it to your repo (or used the one we have provided), what remains is to deploy the 'architecture' of modules into the Iot Edge runtime of the vm.\n",
"\n",
"\n",
"### 3.1: Go into your Iot Hub\n",
"\n",
"Go to the azure portal and then to the Iot Hub that you have paired, and in there you should see the name of your VM device, click on it.\n",
"\n",
"![EdgeDevice](https://raw.githubusercontent.com/Azure/DataScienceVM/master/Extensions/IoT/Img/EdgeDevice.PNG)\n",
"\n",
"### 3.2: Set modules\n",
"\n",
"In the next screen that appears click on set modules.\n",
"\n",
"![SetModules](https://raw.githubusercontent.com/Azure/DataScienceVM/master/Extensions/IoT/Img/SetModules.PNG)\n",
"\n",
"### 3.3: Add custom module\n",
"\n",
"In the next screen click on add module and select Iot Edge module, fill in the fields; after that you should see your custom module in the modules list.\n",
"\n",
"![AddModule](https://raw.githubusercontent.com/Azure/DataScienceVM/master/Extensions/IoT/Img/AddModule.PNG)\n",
"\n",
"NOTE: If you're using azure container registry you need to fill the container registry settings fields just above the button for adding modules.\n",
"\n",
"Once you're done, click on next.\n",
"\n",
"### 3.4: Routing\n",
"\n",
"In order to route the incoming telemetry messages from Edge Hub to our module and also upstream (to Iot Hub) we are going to add the following routes:\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"{\n",
" \"routes\": {\n",
" \"sensorToExtractormodule\": \"FROM /messages/* INTO BrokeredEndpoint(\\\"/modules/extractormodule/inputs/input1\\\")\",\n",
" \"IncomingDataToIoTHub\": \"FROM /messages/* INTO $upstream\"\n",
" }\n",
"}"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"\n",
"the first route will send incoming sensor data into our custom module and the second one will also send a copy to IoT Hub.\n",
"\n",
"Note: You need to replace 'extractormodule' by the name you have given to your custom module.\n",
"\n",
"You should see something like this:\n",
"\n",
"![Routes](https://raw.githubusercontent.com/Azure/DataScienceVM/master/Extensions/IoT/Img/Routes.PNG)\n",
"\n",
"\n",
"Click on next and in the next screen click submit.\n",
"\n",
"\n",
"if everything went alright you should see your modules running by using the following commands:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"%%bash\n",
"\n",
"iotedge list "
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Now that everything is set up you can start sending telemetry data into your vm.\n",
"\n",
"Note: Remember that you need to install your certificates in your downstream device."
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.5.5"
}
},
"nbformat": 4,
"nbformat_minor": 2
}

Просмотреть файл

@ -1,264 +0,0 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Processing IoT data \n",
"\n",
"## Summary \n",
"\n",
"This notebook explains how to process telemetry data coming from IoT devices that arrives through a gateway enabled edgeHub.\n",
"\n",
"## Description\n",
"\n",
"The purpose of this notebook is to explain and guide the reader on how to process telemetry data generated from IoT devices within the DSVM IoT extension.\n",
"\n",
"## Requirements\n",
"\n",
"* A gateway enabled Edge Runtime. See 'Setting up IoT Edge'\n",
"* A sniffer architecture deployed. See 'Obtaining IoT Telemetry'\n",
"* A device sending telemetry to your gateway. For this notebook we have chosen the scenario where a device is sending temperature telemetry.\n",
"\n",
"## Documentation\n",
"\n",
"* https://tutorials-raspberrypi.com/raspberry-pi-measure-humidity-temperature-dht11-dht22/\n",
"* http://www.bzarg.com/p/how-a-kalman-filter-works-in-pictures/"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Step 1: Reading generated data\n",
"\n",
"During this step we are going to load the data generated from the IoT devices. The sniffer module mounts a docker volume in order to share data between the module and the host; in order to retrieve the path where the module is storing its data you can run the following:\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"%%bash\n",
"# Listing the volumes\n",
"sudo docker volume ls"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"%%bash\n",
"# Getting the volume path\n",
"sudo docker inspect <volume id>"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Copy the path that was obtained as result from the last command.\n",
"\n",
"Since the file location is protected, we are going to make a directory and copy the file over there. The name of the file generated from the module is called data.json\n",
"\n",
"Path: volume_path/data.json\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"%%bash\n",
"## Making a directory\n",
"mkdir \"/home/$USER/IoT/Data\"\n",
"sudo cp <file path> \"/home/$USER/IoT/Data/data.json\"\n",
"## "
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Next, we are going to extract the data using python."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import json\n",
"import numpy as np\n",
"\n",
"## Reading the data from the file\n",
"## Note Change your user\n",
"path = \"/home/<user>/IoT/Data\"\n",
"file = path + \"/data.json\"\n",
"data = {}\n",
"with open(file) as f:\n",
" for line in f.readlines():\n",
" sample = json.loads(line)\n",
" for key in sample.keys():\n",
" if key in data:\n",
" data[key].append([len(data[key]),sample[key]])\n",
" else:\n",
" data[key] = []\n",
" data[key].append([len(data[key]),sample[key]])\n",
"\n",
"temperature = np.array(data['temperature'])\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Step 2: Using a low pass filter in order to detect anomalies \n",
"\n",
"\"The simplest approach to identifying irregularities in data is to flag the data points that deviate from common statistical properties of a distribution, including mean, median, mode, and quantiles. Let's say the definition of an anomalous data point is one that deviates by a certain standard deviation from the mean. Traversing mean over time-series data isn't exactly trivial, as it's not static. You would need a rolling window to compute the average across the data points. Technically, this is called a rolling average or a moving average, and it's intended to smooth short-term fluctuations and highlight long-term ones. Mathematically, an n-period simple moving average can also be defined as a 'low pass filter.' \"\n",
"\n",
"\n",
"In this next step we are going to build a low pass filter (moving average) using discrete linear convolution to detect anomalies in our telemetry data. Check the documentation for a more detailed explanation of the theory.\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from __future__ import division\n",
"from itertools import count\n",
"import matplotlib.pyplot as plt\n",
"from numpy import linspace, loadtxt, ones, convolve\n",
"import pandas as pd\n",
"import collections\n",
"from random import randint\n",
"from matplotlib import style\n",
"style.use('fivethirtyeight')\n",
"%matplotlib inline\n",
"print(temperature)\n",
"## Adding some noise\n",
"#temperature[50][1] = 50.0\n",
"#temperature[100][1] = 75.0\n",
"#temperature[150][1] = 50.0\n",
"#temperature[200][1] = 75.0\n",
"data_as_frame = pd.DataFrame(temperature, columns=['index','temperature'])\n",
"data_as_frame.head()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Computes moving average using discrete linear convolution of two one dimensional sequences.\n",
"def moving_average(data, window_size):\n",
" window = np.ones(int(window_size))/float(window_size)\n",
" return np.convolve(data, window, 'same')\n",
"\n",
"# Helps in exploring the anomalies using stationary standard deviation\n",
"def explain_anomalies(y, window_size, sigma=1.0):\n",
" avg = moving_average(y, window_size).tolist()\n",
" residual = y - avg\n",
" # Calculate the variation in the distribution of the residual\n",
" std = np.std(residual)\n",
" return {'standard_deviation': round(std, 3),\n",
" 'anomalies_dict': collections.OrderedDict([(index, y_i) for\n",
" index, y_i, avg_i in zip(count(), y, avg)\n",
" if (y_i > avg_i + (sigma*std)) | (y_i < avg_i - (sigma*std))])}\n",
"\n",
"# Helps in exploring the anomalies using rolling standard deviation\n",
"def explain_anomalies_rolling_std(y, window_size, sigma=1.0):\n",
" avg = moving_average(y, window_size)\n",
" avg_list = avg.tolist()\n",
" residual = y - avg\n",
" # Calculate the variation in the distribution of the residual\n",
" testing_std = pd.rolling_std(residual, window_size)\n",
" testing_std_as_df = pd.DataFrame(testing_std)\n",
" rolling_std = testing_std_as_df.replace(np.nan,\n",
" testing_std_as_df.ix[window_size - 1]).round(3).iloc[:,0].tolist()\n",
" std = np.std(residual)\n",
" return {'stationary standard_deviation': round(std, 3),\n",
" 'anomalies_dict': collections.OrderedDict([(index, y_i)\n",
" for index, y_i, avg_i, rs_i in zip(count(),\n",
" y, avg_list, rolling_std)\n",
" if (y_i > avg_i + (sigma * rs_i)) | (y_i < avg_i - (sigma * rs_i))])}\n",
"\n",
"\n",
"# This function is responsible for displaying how the function performs on the given dataset.\n",
"def plot_results(x, y, window_size, sigma_value=1,text_xlabel=\"X Axis\", text_ylabel=\"Y Axis\", applying_rolling_std=False):\n",
" plt.figure(figsize=(15, 8))\n",
" plt.plot(x, y, \"k.\")\n",
" y_av = moving_average(y, window_size)\n",
" plt.plot(x, y_av, color='green')\n",
" plt.xlim(0, 1000)\n",
" plt.xlabel(text_xlabel)\n",
" plt.ylabel(text_ylabel)\n",
"\n",
" # Query for the anomalies and plot the same\n",
" events = {}\n",
" if applying_rolling_std:\n",
" events = explain_anomalies_rolling_std(y, window_size=window_size, sigma=sigma_value)\n",
" else:\n",
" events = explain_anomalies(y, window_size=window_size, sigma=sigma_value)\n",
"\n",
" x_anomaly = np.fromiter(events['anomalies_dict'].keys(), dtype=int, count=len(events['anomalies_dict']))\n",
" y_anomaly = np.fromiter(events['anomalies_dict'].values(), dtype=float,\n",
" count=len(events['anomalies_dict']))\n",
" plt.plot(x_anomaly, y_anomaly, \"r*\", markersize=12)\n",
"\n",
" # add grid and lines and enable the plot\n",
" plt.grid(True)\n",
" plt.show()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"x = data_as_frame['index']\n",
"Y = data_as_frame['temperature']\n",
"\n",
"\n",
"# plot the results\n",
"plot_results(x, y=Y, window_size=10, text_xlabel=\"Moment\", sigma_value=3,\n",
" text_ylabel=\"Temperature\")\n",
"events = explain_anomalies(Y, window_size=5, sigma=3)\n",
"\n",
"# Display the anomaly dict\n",
"print(\"Information about the anomalies model:{}\".format(events))"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.5.5"
}
},
"nbformat": 4,
"nbformat_minor": 2
}

Просмотреть файл

@ -1,208 +0,0 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Setting up your Edge device\n",
"\n",
"## Summary \n",
"\n",
"This notebook explains how to register your DSVM as an edge device and enable it as a gateway. \n",
"\n",
"## Description\n",
"\n",
"The purpose of this notebook is to run through and explain the scripts that have been included in the DSVM IoT extension that register and enable the VM as an Edge device.\n",
"\n",
"## Requirements\n",
"\n",
"* Before starting this notebook you need to perform an azure cli login (az login)\n",
"\n",
"\n",
"## Documentation\n",
"https://docs.microsoft.com/en-us/azure/iot-edge/how-to-create-transparent-gateway-linux\n",
"\n",
"https://docs.microsoft.com/en-us/azure/iot-edge/how-to-install-iot-edge-linux"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Step 1: Enable the VM as an Edge device\n",
"\n",
"The DSVM IoT extension includes the IoT Edge runtime by default; it also includes a script for registering the VM as an Edge device to the IoT hub that was selected during the creation of the extension.\n",
"\n",
"The path to this script is /home/$USER/IoT/IotEdge/edge_configure.sh\n",
"\n",
"The script has these steps:\n",
"\n",
"1. Retrieves the Iot Hub name that was selected during the creation of the VM and also the subscription Id. \n",
"2. Performs an Az login in order to verify the user.\n",
"3. Creates a new Edge device on IoT Hub and retrieves the connection string. \n",
"4. Inserts the connection string into the Iot Edge daemon config file.\n",
"5. Restarts the service. \n",
"\n",
"As soon as you run this script, you're able to specify deployments for your VM from Iot Hub.\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"%%bash\n",
"source /etc/profile\n",
"# Obtaining Vm info\n",
"user=\"$(whoami)\"\n",
"hubName=\"$(cat /home/$user/.hubName.txt)\"\n",
"host=\"$(hostname)\"\n",
"device=\"${host}_edge\"\n",
"subid=\"$(cat /home/$user/.vminfo.json | jq '.compute.subscriptionId')\"\n",
"subid=\"${subid%\\\"}\"\n",
"subid=\"${subid#\\\"}\"\n",
"az account set -s $subid\n",
"# Register edge device \n",
"az iot hub device-identity create --device-id $device --hub-name $hubName --edge-enabled\n",
"# Inserting connection string into config file\n",
"connection_string=\"$(az iot hub device-identity show-connection-string --device-id $device --hub-name $hubName )\"\n",
"connection_string=\"${connection_string::-2}\"\n",
"connection_string=\"${connection_string:10}\"\n",
"connection_string=\" device_connection_string: $connection_string\"\n",
"sudo sed -i '26s/.*//' /etc/iotedge/config.yaml\n",
"sudo sed -i '26i\\'\"$connection_string\" /etc/iotedge/config.yaml\n",
"# Rebooting edge for pairing to proceed\n",
"sudo systemctl restart iotedge"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Step 2: Enable the VM as a Gateway\n",
"\n",
"You can send telemetry data generated by IoT devices directly into the DSVM by enabling the IoT Edge runtime as a gateway. We have added a script in order to enable the gateway in a quickstart scenario.\n",
"\n",
"The path to this script is /home/$USER/IoT/IotEdge/gateway/gateway_configure.sh\n",
"\n",
"Note: you need to pass a name for the device as a parameter; this is used in order to generate the certificates. \n",
"\n",
"The script has these steps:\n",
"\n",
"1. It validates that you have sent the device name.\n",
"2. Generates the quickstart certificates.\n",
"3. Generates the certificate chain.\n",
"4. Inserts the certificates location into the Iot Edge Daemon config file.\n",
"5. Restarts the service.\n",
"\n",
"Note: \n",
" * The scripts for generating the certificates are not meant for production environments, just for a quickstart. \n",
" * DO NOT use a name that is the same as the gateway's DNS host name. Doing so will cause client certification against the certificates to fail.\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"%%bash\n",
"source /etc/profile\n",
"## You need to set your device name for the certificates\n",
"device_name=\"mytestvm\"\n",
"if [ -z ${device_name+x} ];\n",
"then\n",
" echo \"Missing parameter: Gateway device name\"\n",
" exit 1\n",
"fi\n",
"directory=\"/home/$USER/IoT/IotEdge/gateway\"\n",
"# Certificate generation\n",
"(cd $directory/certificates/; sudo bash certGen.sh create_root_and_intermediate; sudo bash certGen.sh create_edge_device_certificate $device_name )\n",
"#(cd certificates/; sudo bash certGen.sh create_edge_device_certificate $device_name)\n",
"# Chain creation\n",
"sudo chmod 777 $directory/certificates/certs\n",
"sudo cat $directory/certificates/certs/new-edge-device.cert.pem $directory/certificates/certs/azure-iot-test-only.intermediate.cert.pem $directory/certificates/certs/azure-iot-test-only.root.ca.cert.pem > $directory/certificates/certs/new-edge-device-full-chain.cert.pem\n",
"certdir=\"$directory/certificates\"\n",
"device_ca_cert=\" device_ca_cert: $certdir/certs/new-edge-device-full-chain.cert.pem\"\n",
"device_ca_pk=\" device_ca_pk: $certdir/private/new-edge-device.key.pem\"\n",
"trusted_ca_certs=\" trusted_ca_certs: $certdir/certs/azure-iot-test-only.root.ca.cert.pem\"\n",
"title=\"certificates:\"\n",
"sudo sed -i '53s/.*//' /etc/iotedge/config.yaml\n",
"sudo sed -i '54s/.*//' /etc/iotedge/config.yaml\n",
"sudo sed -i '55s/.*//' /etc/iotedge/config.yaml\n",
"sudo sed -i '56s/.*//' /etc/iotedge/config.yaml\n",
"sudo sed -i '53i\\'\"$title\" /etc/iotedge/config.yaml\n",
"sudo sed -i '54i\\'\"$device_ca_cert\" /etc/iotedge/config.yaml\n",
"sudo sed -i '55i\\'\"$device_ca_pk\" /etc/iotedge/config.yaml\n",
"sudo sed -i '56i\\'\"$trusted_ca_certs\" /etc/iotedge/config.yaml\n",
"sudo systemctl restart iotedge"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## [Optional] Step 3: Troubleshooting the Gateway:\n",
"\n",
"\n",
"At the moment of writing this notebook, the IoT Edge gateway presents an issue where the runtime does not use the user-generated certificates and instead uses the 'Quick Start' certificates, which do not allow us to connect to the gateway. In order to test that everything is set up right, run the following command; if everything is right you should see a 'Verified ok' status. If not, then follow the next steps"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"%%bash\n",
"openssl s_client -connect localhost:8883 -CAfile /home/$USER/IoT/IotEdge/gateway/certificates/certs/azure-iot-test-only.root.ca.cert.pem -showcerts"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"If the last command failed, then we are going to run the following commands. What will happen is that they will delete the 'Quick Start' certificate folders and the docker images, and then restart the service for it to work."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"%%bash\n",
"\n",
"sudo systemctl stop iotedge\n",
"sudo rm -rf /var/lib/iotedge/hsm/certs\n",
"sudo rm -rf /var/lib/iotedge/hsm/cert_keys\n",
"sudo docker rmi mcr.microsoft.com/azureiotedge-hub:1.0 -f\n",
"sudo docker rmi mcr.microsoft.com/azureiotedge-agent:1.0 -f\n",
"sudo systemctl restart iotedge\n",
"\n"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.5.5"
}
},
"nbformat": 4,
"nbformat_minor": 2
}

Просмотреть файл

@ -1,32 +0,0 @@
# Welcome to Azure IoT Edge on Data Science Virtual Machine
This tool is an extension to the [Microsoft Azure Data Science Virtual Machine (DSVM)](http://aka.ms/dsvm) to add the Azure IoT Edge capability to bring the power of advanced analytics, machine learning, and artificial intelligence to the edge. The Azure IoT Edge run time will be layered on top of the [Ubuntu edition of the DSVM](http://aka.ms/dsvm/ubuntu), combining the rich AI and Machine Learning toolset on the DSVM with the capabilities to build applications for the intelligent edge.
The extension will create an VM provisioned with the IoT tools required for developing IoT + DS solutions which includes:
| Name | Version |
| ---- | ------- |
| Azure IoT SDK - Python | 1.4.2 |
| Azure IoT SDK - C | 1.2.8 |
| Azure IoT SDK - Java | 1.13.3 |
| VS Code IoT Workbench extension | 0.1.9 |
| VS Code IoT Edge extension | 1.2.0 |
| Azure CLI IoT extension | 0.3.2 |
| IoT Edge Runtime | 1.0.1 |
The extension can be paired to an existing IoT Hub, or a new IoT Hub can be created during the deployment for pairing with the VM. We have also included scripts to make your setup quicker and Jupyter notebooks to introduce you to some of the new capabilities of the IoT extension:
| Name | Info |
| ---- | ---- |
| edge_configure.sh | This script pairs the IoT Edge runtime to the IoT Hub that you selected during the creation of the extension |
| gateway_configure.sh | This script generates quickstart certificates and inserts the paths into the security daemon of the IoT Edge Runtime in order to transform it into a gateway |
| ObtainingIoTData.ipynb | This notebook explains how can you leverage the included IoT Edge runtime in order to build a sample architecture for processing incoming IoT telemetry inside the virtual machine by using docker containers and docker volumes |
| ProcessingIoTData.ipynb | This notebook shows how to run experiments by using incoming telemetry data from the proposed architecture |
| SettingUpEdge.ipynb | This notebook explains in more detail the included bash scripts for setting up your IoT Edge environment |
Click the button below to create a Data Science VM IoT extension.
<a href="https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FDataScienceVM%2Fmaster%2FExtensions%2FIoT%2Fazuredeploy.json" target="_blank">
- <img src="http://azuredeploy.net/deploybutton.png"/>
-</a>

Просмотреть файл

@ -1,290 +0,0 @@
{
"$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"adminUsername": {
"type": "string",
"metadata": {
"description": "Username for the Virtual Machine."
}
},
"adminPassword": {
"type": "securestring",
"metadata": {
"description": "Password for the Virtual Machine."
}
},
"numberOfInstances": {
"type": "string",
"defaultValue": "1",
"metadata": {
"description": "Number of VMs to deploy."
}
},
"vmName": {
"type": "string",
"metadata": {
"description": "Name Prefix for the Virtual Machine."
}
},
"vmSize": {
"type": "string",
"defaultValue": "Standard_DS2_v2",
"metadata": {
"description": "Size for the Virtual Machine."
}
},
"hubExists": {
"type": "string",
"allowedValues": [
"new",
"existing"
],
"metadata": {
"description": "Use an existing IoT hub or create a new one"
}
},
"iotHubName": {
"type": "string",
"metadata": {
"description": "Name for the IoT hub."
}
}
},
"variables": {
"location": "[resourceGroup().location]",
"numberOfInstances": "[int(parameters('numberOfInstances'))]",
"imagePublisher": "microsoft-dsvm",
"imageOffer": "linux-data-science-vm-ubuntu",
"sku": "linuxdsvmubuntu",
"nicName": "[parameters('vmName')]",
"addressPrefix": "10.0.0.0/16",
"subnetName": "Subnet",
"subnetPrefix": "10.0.0.0/24",
"storageAccountType": "Standard_LRS",
"publicIPAddressType": "Dynamic",
"publicIPAddressName": "[concat('dsvm-',toLower(parameters('vmName') ) )]",
"vmName": "[parameters('vmName')]",
"vmSize": "[parameters('vmSize')]",
"virtualNetworkName": "[parameters('vmName')]",
"nsgName": "[concat(parameters('vmName'),'-nsg')]",
"nsgId": "[resourceId(resourceGroup().name, 'Microsoft.Network/networkSecurityGroups', variables('nsgName'))]",
"vnetID": "[resourceId('Microsoft.Network/virtualNetworks',variables('virtualNetworkName'))]",
"subnetRef": "[concat(variables('vnetID'),'/subnets/',variables('subnetName'))]",
"fileUris": "https://raw.githubusercontent.com/Azure/DataScienceVM/master/Extensions/IoT/vm_configure.sh",
"hubExists": "[parameters('hubExists')]",
"iotHubName": "[parameters('iotHubName')]",
"command": "bash vm_configure.sh ",
"adminUsername": "[parameters('adminUsername')]",
"scriptParameters": "[concat(variables('iotHubName'),' ',variables('adminUsername'))]",
"commandToExecute": "[concat(variables('command'),variables('scriptParameters'))]"
},
"resources": [
{
"apiVersion": "2018-02-01",
"type": "Microsoft.Network/networkSecurityGroups",
"location": "[variables('location')]",
"name": "[variables('nsgName')]",
"properties": {
"securityRules": [
{
"name": "Allow-SSH-Jupyterhub",
"properties": {
"protocol": "Tcp",
"sourcePortRange": "*",
"sourceAddressPrefix": "*",
"destinationAddressPrefix": "*",
"access": "Allow",
"priority": 100,
"direction": "Inbound",
"sourcePortRanges": [],
"destinationPortRanges": [
"22",
"8000"
],
"destinationPortRange": ""
}
}
]
}
},
{
"apiVersion": "2015-05-01-preview",
"type": "Microsoft.Network/publicIPAddresses",
"name": "[concat(variables('publicIPAddressName'), copyindex())]",
"location": "[variables('location')]",
"copy": {
"name": "publicIPLoop",
"count": "[variables('numberOfInstances')]"
},
"properties": {
"publicIPAllocationMethod": "[variables('publicIPAddressType')]"
}
},
{
"apiVersion": "2015-05-01-preview",
"type": "Microsoft.Network/virtualNetworks",
"name": "[variables('virtualNetworkName')]",
"location": "[variables('location')]",
"dependsOn": [
"[concat('Microsoft.Network/networkSecurityGroups/', variables('nsgName'))]"
],
"properties": {
"addressSpace": {
"addressPrefixes": [
"[variables('addressPrefix')]"
]
},
"subnets": [
{
"name": "[variables('subnetName')]",
"properties": {
"addressPrefix": "[variables('subnetPrefix')]"
}
}
]
}
},
{
"apiVersion": "2015-05-01-preview",
"type": "Microsoft.Network/networkInterfaces",
"name": "[concat(variables('nicName'), copyindex())]",
"location": "[variables('location')]",
"copy": {
"name": "nicLoop",
"count": "[variables('numberOfInstances')]"
},
"dependsOn": [
"[concat('Microsoft.Network/publicIPAddresses/', concat(variables('publicIPAddressName'),copyindex()))]",
"[concat('Microsoft.Network/networkSecurityGroups/', variables('nsgName'))]",
"[concat('Microsoft.Network/virtualNetworks/', variables('virtualNetworkName'))]"
],
"properties": {
"ipConfigurations": [
{
"name": "ipconfig1",
"properties": {
"privateIPAllocationMethod": "Dynamic",
"publicIPAddress": {
"id": "[resourceId('Microsoft.Network/publicIPAddresses',concat(variables('publicIPAddressName'),copyindex()))]"
},
"subnet": {
"id": "[variables('subnetRef')]"
}
}
}
],
"networkSecurityGroup": {
"id": "[variables('nsgId')]"
}
}
},
{
"condition": "[equals(variables('hubExists'), 'new')]",
"apiVersion": "2016-02-03",
"type": "Microsoft.Devices/IotHubs",
"name": "[variables('iotHubName')]",
"location": "[variables('location')]",
"sku": {
"name": "S1",
"tier": "Standard",
"capacity": 1
},
"properties": {
"location": "[variables('location')]"
}
},
{
"apiVersion": "2017-03-30",
"type": "Microsoft.Compute/virtualMachines",
"name": "[concat(variables('vmName'), copyIndex())]",
"location": "[variables('location')]",
"copy": {
"name": "virtualMachineLoop",
"count": "[variables('numberOfInstances')]"
},
"tags": {
"Application": "DataScience"
},
"dependsOn": [
"[concat('Microsoft.Network/networkInterfaces/', variables('nicName'), copyindex())]"
],
"properties": {
"hardwareProfile": {
"vmSize": "[variables('vmSize')]"
},
"osProfile": {
"computerName": "[concat(variables('vmName'), copyIndex())]",
"adminUsername": "[parameters('adminUsername')]",
"adminPassword": "[parameters('adminPassword')]"
},
"storageProfile": {
"imageReference": {
"publisher": "[variables('imagePublisher')]",
"offer": "[variables('imageOffer')]",
"sku": "[variables('sku')]",
"version": "latest"
},
"osDisk": {
"managedDisk": {
"storageAccountType": "[variables('storageAccountType')]"
},
"createOption": "FromImage"
},
"dataDisks": [
{
"managedDisk": {
"storageAccountType": "[variables('storageAccountType')]"
},
"createOption": "FromImage",
"lun": 0
}
]
},
"networkProfile": {
"networkInterfaces": [
{
"id": "[resourceId('Microsoft.Network/networkInterfaces',concat(variables('nicName'), copyindex()))]"
}
]
},
"diagnosticsProfile": {
"bootDiagnostics": {
"enabled": false
}
}
},
"resources": [
{
"type": "extensions",
"name": "[variables('vmName')]",
"apiVersion": "2017-03-30",
"location": "[variables('location')]",
"dependsOn": [
"[concat('Microsoft.Compute/virtualMachines/', variables('vmName'),copyindex())]"
],
"properties": {
"publisher": "Microsoft.OSTCExtensions",
"type": "CustomScriptForLinux",
"typeHandlerVersion": "1.4",
"autoUpgradeMinorVersion": true,
"settings": {
"fileUris": "[split(variables('fileUris'), ' ')]",
"commandToExecute": "[variables('commandToExecute')]"
}
}
}
]
}
],
"outputs": {
"firstDataScienceVmUrl": {
"type": "string",
"value": "[concat('https://ms.portal.azure.com/#resource/subscriptions/', subscription().subscriptionId, '/resourceGroups/', resourceGroup().name, '/providers/Microsoft.Compute/virtualMachines/', variables('vmName'), '0')]"
},
"numInstances": {
"type": "string",
"value": "[parameters('numberOfInstances')]"
}
}
}

Просмотреть файл

@ -1,23 +0,0 @@
#!/bin/bash
# edge_configure.sh
#
# Registers this VM as an Azure IoT Edge device in the IoT Hub that was
# selected when the DSVM IoT extension was created, writes the resulting
# device connection string into the IoT Edge daemon config file, and
# restarts the iotedge service so the pairing takes effect.
#
# Requires: azure-cli (with the IoT extension), jq, sudo rights.
# Obtaining Vm info
user="$(whoami)"
# Hub name and instance metadata were cached as dotfiles at provisioning
# time (vm_configure.sh writes .hubName.txt and .vminfo.json).
hubName="$(cat /home/$user/.hubName.txt)"
host="$(hostname)"
# Edge device id is derived from the host name with an "_edge" suffix.
device="${host}_edge"
subid="$(cat /home/$user/.vminfo.json | jq '.compute.subscriptionId')"
# jq prints the value with surrounding double quotes; strip trailing then
# leading quote.
subid="${subid%\"}"
subid="${subid#\"}"
# CLI Login
az login
az account set -s $subid
# Register edge device
az iot hub device-identity create --device-id $device --hub-name $hubName --edge-enabled
# Inserting connection string into config file
connection_string="$(az iot hub device-identity show-connection-string --device-id $device --hub-name $hubName )"
# Trim the CLI's wrapper around the raw value: drop the last 2 and the
# first 10 characters. NOTE(review): assumes the CLI's default output
# shape — confirm against the installed azure-cli version.
connection_string="${connection_string::-2}"
connection_string="${connection_string:10}"
# Leading space preserves the YAML indentation expected in config.yaml.
connection_string=" device_connection_string: $connection_string"
# NOTE(review): line 26 of /etc/iotedge/config.yaml is assumed to hold the
# device_connection_string entry — fragile if the config layout changes.
sudo sed -i '26s/.*//' /etc/iotedge/config.yaml
sudo sed -i '26i\'"$connection_string" /etc/iotedge/config.yaml
# Restarting the edge daemon for the pairing to proceed
sudo systemctl restart iotedge

Просмотреть файл

@ -1,28 +0,0 @@
#!/bin/bash
# gateway_configure.sh <device-name>
#
# Turns the IoT Edge runtime on this VM into a transparent gateway:
# generates quickstart (NON-production) root/intermediate/device
# certificates, builds the full-chain device certificate, points the IoT
# Edge daemon config at the generated files, and restarts the service.
#
# $1 - device name used for the edge certificate. DO NOT use the gateway's
#      DNS host name, or client certificate verification will fail.
#
# Assumes it is run from the directory containing certificates/certGen.sh.
echo "Note: DO NOT use a name that is the same as the gateway's DNS host name. Doing so will cause client certification against the certificates to fail."
# Abort if the device-name argument was not supplied.
if [ -z ${1+x} ];
then
echo "Missing parameter: Gateway device name"
exit 1
fi
# Certificate generation
(cd certificates/; sudo bash certGen.sh create_root_and_intermediate)
(cd certificates/; sudo bash certGen.sh create_edge_device_certificate $1)
# Chain creation
# NOTE(review): chmod 777 is a quickstart shortcut — far too permissive
# for anything beyond a demo setup.
sudo chmod 777 certificates/certs
sudo cat certificates/certs/new-edge-device.cert.pem certificates/certs/azure-iot-test-only.intermediate.cert.pem certificates/certs/azure-iot-test-only.root.ca.cert.pem > certificates/certs/new-edge-device-full-chain.cert.pem
user="$(whoami)"
# Absolute paths written into config.yaml; assumes the extension's standard
# install location under /home/<user>/IoT/IotEdge/gateway.
certdir="/home/$user/IoT/IotEdge/gateway/certificates"
# Leading space on each line preserves the YAML indentation expected in
# config.yaml.
device_ca_cert=" device_ca_cert: $certdir/certs/new-edge-device-full-chain.cert.pem"
device_ca_pk=" device_ca_pk: $certdir/private/new-edge-device.key.pem"
trusted_ca_certs=" trusted_ca_certs: $certdir/certs/azure-iot-test-only.root.ca.cert.pem"
title="certificates:"
# NOTE(review): lines 53-56 of /etc/iotedge/config.yaml are assumed to be
# the "certificates:" section — fragile if the config layout changes.
sudo sed -i '53s/.*//' /etc/iotedge/config.yaml
sudo sed -i '54s/.*//' /etc/iotedge/config.yaml
sudo sed -i '55s/.*//' /etc/iotedge/config.yaml
sudo sed -i '56s/.*//' /etc/iotedge/config.yaml
sudo sed -i '53i\'"$title" /etc/iotedge/config.yaml
sudo sed -i '54i\'"$device_ca_cert" /etc/iotedge/config.yaml
sudo sed -i '55i\'"$device_ca_pk" /etc/iotedge/config.yaml
sudo sed -i '56i\'"$trusted_ca_certs" /etc/iotedge/config.yaml
# Restart so the daemon picks up the new certificate configuration.
sudo systemctl restart iotedge

Просмотреть файл

@ -1,13 +0,0 @@
{
"$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentParameters.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"adminUsername": { "value" : "<<USER>>"},
"adminPassword": { "value" : "<<PASS>>"},
"vmName": { "value" : "<<VMNAME>>"},
"vmSize": { "value" : "<<Size: eg:Standard_DS2_v2>>"},
"numberOfInstances": { "value" : "1"},
"hubExists": { "value" : "existing / new"},
"iotHubName": { "value" : "<<IoT Hub Name to create or attach>>"}
}
}

Просмотреть файл

@ -1,68 +0,0 @@
#!/bin/bash
# DSVM IoT extension installer.
#   $1 - IoT Hub name (persisted for later configuration steps)
#   $2 - admin user name; everything is installed under /home/$2
# Getting user paths
source /etc/profile
# Saving name of IoT Hub for later config
echo "$1" | tee "/home/$2/.hubName.txt"
# Saving VM info (Azure Instance Metadata Service) for later config
curl -H Metadata:true "http://169.254.169.254/metadata/instance?api-version=2017-08-01" | python -m json.tool > "/home/$2/.vminfo.json"
# Making the IoT directory; -p keeps re-provisioning idempotent
mkdir -p "/home/$2/IoT"
chmod 777 "/home/$2/IoT"
# Installing Edge config file
dir_path="/home/$2/IoT/IotEdge"
file_path="/home/$2/IoT/IotEdge/edge_configure.sh"
mkdir -p "$dir_path"
curl https://raw.githubusercontent.com/Azure/DataScienceVM/master/Extensions/IoT/edge_configure.sh > "$file_path"
chmod 777 "$file_path"
chmod 777 "$dir_path"
# Installing IoT tools and SDKs
# Azure's IoT SDKs
mkdir -p "/home/$2/IoT/IotSDKs"
mkdir -p "/home/$2/IoT/IotSDKs/Java"
git clone https://github.com/Azure/azure-iot-sdk-java.git "/home/$2/IoT/IotSDKs/Java"
mkdir -p "/home/$2/IoT/IotSDKs/Python"
git clone --recursive https://github.com/Azure/azure-iot-sdk-python.git "/home/$2/IoT/IotSDKs/Python"
## Tensorflow Serving apt repository signing key
curl https://storage.googleapis.com/tensorflow-serving-apt/tensorflow-serving.release.pub.gpg | sudo apt-key add -
# Installing certificate generation files for gateway
dir_path="/home/$2/IoT/IotEdge/gateway"
mkdir -p "$dir_path"
chmod 777 "$dir_path"
dir_path="/home/$2/IoT/IotEdge/gateway/certificates"
mkdir -p "$dir_path"
chmod 777 "$dir_path"
git clone https://github.com/Azure/azure-iot-sdk-c.git "$dir_path/azure-iot-sdk-c"
file_path="$dir_path/azure-iot-sdk-c/tools/CACertificates/*.cnf"
# $file_path is intentionally unquoted here: it contains a glob (*.cnf)
# that must expand to the individual config files.
cp $file_path "$dir_path"
cp "$dir_path/azure-iot-sdk-c/tools/CACertificates/certGen.sh" "$dir_path"
chmod 777 "$dir_path/certGen.sh"
# Installing gateway configuration file
file_path="/home/$2/IoT/IotEdge/gateway/gateway_configure.sh"
curl https://raw.githubusercontent.com/Azure/DataScienceVM/master/Extensions/IoT/gateway_configure.sh > "$file_path"
chmod 777 "$file_path"
# Installing CLI extension
# NOTE(review): 'azure-cli-iot-ext' was later renamed 'azure-iot'; kept as-is
# to preserve behavior on the CLI versions this script targets — confirm.
az extension add --name azure-cli-iot-ext
# Adding notebooks
dir_path="/home/$2/notebooks/IoT"
mkdir -p "$dir_path"
curl https://raw.githubusercontent.com/Azure/DataScienceVM/master/Extensions/IoT/Notebooks/AzureServices.ipynb > "$dir_path/AzureServices.ipynb"
curl https://raw.githubusercontent.com/Azure/DataScienceVM/master/Extensions/IoT/Notebooks/ObtainingIoTData.ipynb > "$dir_path/ObtainingIoTData.ipynb"
curl https://raw.githubusercontent.com/Azure/DataScienceVM/master/Extensions/IoT/Notebooks/ProcessingIoTData.ipynb > "$dir_path/ProcessingIoTData.ipynb"
curl https://raw.githubusercontent.com/Azure/DataScienceVM/master/Extensions/IoT/Notebooks/SettingUpEdge.ipynb > "$dir_path/SettingUpEdge.ipynb"
# Adding container files for the extractor module
dir_path="/home/$2/IoT/IotEdge/gateway/extractor_module"
mkdir -p "$dir_path"
curl https://raw.githubusercontent.com/Azure/DataScienceVM/master/Extensions/IoT/Extractor_Module/main.py > "$dir_path/main.py"
curl https://raw.githubusercontent.com/Azure/DataScienceVM/master/Extensions/IoT/Extractor_Module/Dockerfile > "$dir_path/Dockerfile"
curl https://raw.githubusercontent.com/Azure/DataScienceVM/master/Extensions/IoT/Extractor_Module/requirements.txt > "$dir_path/requirements.txt"
# Installing VS Code extensions as the admin user (not root)
sudo -u "$2" code --install-extension vsciot-vscode.vscode-iot-workbench
sudo -u "$2" code --install-extension vsciot-vscode.azure-iot-edge
# NOTE(review): the trailing '&&' chains the apt steps onto this command, so
# they run only if the unattended-upgrade shutdown succeeds (likely intended
# to avoid dpkg lock contention) — confirm this is the desired semantics.
/usr/share/unattended-upgrades/unattended-upgrade-shutdown &&
# The IoT C SDK works through apt-get
add-apt-repository -y ppa:aziotsdklinux/ppa-azureiot
# Installing Edge runtime and C SDK
apt-get update -y
apt install -y iotedge jq azure-iot-sdk-c-dev
# Updating cli
apt install azure-cli -y --allow-change-held-packages