diff --git a/.gitignore b/.gitignore
index 5ffc3d4..206dbdf 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,6 +2,7 @@
.idea
tools/devices-simulator/credentials.js
*.crt
+.ensime
### MacOS ###
.DS_Store
diff --git a/.travis.yml b/.travis.yml
index c557c5a..b418f54 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -2,7 +2,7 @@ jdk: oraclejdk8
language: scala
scala:
- 2.11.8
- - 2.12.0-RC1
+ - 2.12.0
cache:
directories:
- "$HOME/.ivy2"
@@ -12,5 +12,4 @@ notifications:
slack:
secure: S6pcmclrj9vaqHOFMrjgYkF6wXrYF6nB5joYY0rqAwsmTLf7crXRVKZ8txlatpxMHc20Rbw8RQDM6tTka9wwBkHZZfErrcPsS84d5MU9siEkIY42/bAQwuYhxkcgilttgFmSwzLodE72giC/VMhIYCSOyOXIxuR0VtBiPD9Inm9QZ35dZDx3P3nbnaOC4fk+BjdbrX1LB8YL9z5Gy/9TqI90w0FV85XMef75EnSgpqeMD/GMB5hIg+arWVnC2S6hZ91PPCcxCTKBYDjwqUac8mFW/sMFT/yrb2c0NE6ZQqa3dlx/XFyC1X6+7DjJli2Y8OU+FPjY1tQC8JxgVFTbddIgCdUM/5be4uHN/KNs/yF7w1g06ZXK4jhJxxpL4zWINlqDrDmLaqhAtPQkc2CqL3g8MCwYxBbxZY4aFyPfZD7YLdQXDzJZNcfXn9RQQh5y+/zexbGc1zZ/XUo5bK3VbElSs+o2ErI+Sze0FaiK8fW+QeitBdGvjMY7YVKi0Zzf5Dxx1wwxiHR1PQ1r0hA8YZQxwwdpa5lWLFlSVu2w+upPtXqfINMeFktQPbOs1JWIvUvLV0A38dS6R/DsM/W1a3OEVbHQ0Z6OV1nffDnGYPLUl5kRDPFuYYugmCpQHW73lqJdiM0O+Ote4eOQniL1rcajtt+V5cn1/JRWzdJ4PH0=
before_install:
-- openssl aes-256-cbc -K $encrypted_13c010a56f48_key -iv $encrypted_13c010a56f48_iv
- -in devices.json.enc -out src/test/resources/devices.json -d
+- openssl aes-256-cbc -K $encrypted_cbef0ff679f7_key -iv $encrypted_cbef0ff679f7_iv -in devices.json.enc -out src/test/resources/devices.json -d
diff --git a/CHECKPOINTING.md b/CHECKPOINTING.md
index 20dcb75..ac9f267 100644
--- a/CHECKPOINTING.md
+++ b/CHECKPOINTING.md
@@ -11,21 +11,28 @@ For instance, the stream position can be saved every 15 seconds in Azure blobs,
To store checkpoints in Azure blobs the configuration looks like this:
```
-iothub-checkpointing {
- enabled = true
- frequency = 15s
- countThreshold = 1000
- timeThreshold = 30s
- storage {
- rwTimeout = 5s
- backendType = "AzureBlob"
- namespace = "iothub-react-checkpoints"
- azureblob {
- lease = 15s
- useEmulator = false
- protocol = "https"
- account = "..."
- key = "..."
+iothub-react {
+
+ [... other settings ...]
+
+ checkpointing {
+ enabled = true
+ frequency = 15s
+ countThreshold = 1000
+ timeThreshold = 30s
+
+ storage {
+ rwTimeout = 5s
+ namespace = "iothub-react-checkpoints"
+
+ backendType = "AzureBlob"
+ azureblob {
+ lease = 15s
+ useEmulator = false
+ protocol = "https"
+ account = "..."
+ key = "..."
+ }
}
}
}
@@ -35,13 +42,13 @@ Soon it will be possible to plug in custom storage backends implementing a simpl
[interface](src/main/scala/com/microsoft/azure/iot/iothubreact/checkpointing/Backends/CheckpointBackend.scala)
to read and write the stream position.
-There is also one API parameter to enabled/disable the mechanism, for example:
+There is also one API parameter to enable/disable the checkpointing feature, for example:
```scala
val start = java.time.Instant.now()
val withCheckpoints = false
-IoTHub.source(start, withCheckpoints)
+IoTHub().source(start, withCheckpoints)
.map(m => jsonParser.readValue(m.contentAsString, classOf[Temperature]))
.filter(_.value > 100)
.to(console)
@@ -52,19 +59,19 @@ IoTHub.source(start, withCheckpoints)
### Configuration
-The following table describes the impact of the settings within the `iothub-checkpointing`
+The following table describes the impact of the settings within the `checkpointing`
configuration block. For further information, you can also check the
[reference.conf](src/main/resources/reference.conf) file.
| Setting | Type | Example | Description |
|---------|------|---------|-------------|
-| **enabled** | bool | true | Global switch to enable/disable the checkpointing feature. This value overrides the API parameter "withCheckpoints". |
-| **frequency** | duration | 15s | How often to check if the offset in memory should be saved to storage. The check is scheduled for each partition individually. |
+| **enabled** | bool | true | Global switch to enable/disable the checkpointing feature. This value overrides the API parameter "withCheckpoints". |
+| **frequency** | duration | 15s | How often to check if the offset in memory should be saved to storage. The check is scheduled after at least one message has been received, for each partition individually. |
| **countThreshold** | int | 1000 | How many messages to stream before saving the position. The setting is applied to each partition individually. The value should be big enough to take into account buffering and batching. |
-| **timeThreshold** | duration | 60s | In case of low traffic (i.e. when not reaching countThreshold), save the stream position that is older than this value.|
-| storage.**rwTimeout** | duration | 5000ms | How long to wait, when writing to the storage, before triggering a storage timeout exception. |
-| storage.**backendType** | string or class name | "AzureBlob" | Currently only "AzureBlob" is supported. The name of the backend, or the class FQDN, to use to write to the storage. This provides a way to inject custom storage logic. |
-| storage.**namespace** | string | "mycptable" | The table/container which will contain the checkpoints data. When streaming data from multiple IoT hubs, you can use this setting, to store each stream position to a common storage, but in separate tables/containers. |
+| **timeThreshold** | duration | 60s | In case of low traffic (i.e. when countThreshold is not reached), save the stream position if it is older than this value.|
+| storage.**rwTimeout** | duration | 5000ms | How long to wait, when writing to the storage, before triggering a storage timeout exception. |
+| storage.**namespace** | string | "mycptable" | The table/container which will contain the checkpoints data. When streaming data from multiple IoT Hubs, you can use this setting to use separate tables/containers. |
+| storage.**backendType** | string or class name | "AzureBlob" | Currently "AzureBlob" and "Cassandra" are supported. The name of the backend, or the class FQDN, to use to write to the storage. This provides a way to inject custom storage logic. |
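+
+For example, to store the checkpoints in Cassandra instead of Azure blobs, only the `storage`
+block changes; a minimal sketch (the cluster address and replication factor below are illustrative):
+
+```
+iothub-react {
+  checkpointing {
+    storage {
+      rwTimeout = 5s
+      namespace = "iothub-react-checkpoints"
+
+      backendType = "Cassandra"
+      cassandra {
+        cluster = "localhost:9042"
+        replicationFactor = 3
+      }
+    }
+  }
+}
+```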
### Runtime
@@ -86,3 +93,11 @@ Legend:
* **Start point**: whether the client provides a starting position (date or offset) or ask for all
the events from the beginning
* **Saved position**: whether there is a position saved in the storage
+
+### Edge cases
+
+* Azure IoT Hub stores messages for up to 7 days, so the stored position may no longer exist. In
+  such a case the stream will start from the first available message.
+* If the checkpoint position is ahead of the last available message, the stream will fail with an
+  error. This can only happen with an invalid configuration where two streams share the same
+  checkpoints, for example because two jobs write to the same storage namespace (see the example below).
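+
+To keep the checkpoints of independent jobs isolated, each job can override `storage.namespace`
+in its own configuration; a minimal sketch (the namespace value below is illustrative):
+
+```
+iothub-react {
+  checkpointing {
+    storage {
+      namespace = "job1-checkpoints"
+    }
+  }
+}
+```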
diff --git a/Codestyle.IntelliJ.xml b/Codestyle.IntelliJ.xml
deleted file mode 100644
index 0e5064c..0000000
--- a/Codestyle.IntelliJ.xml
+++ /dev/null
@@ -1,22 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/README.md b/README.md
index b1df8db..480b7ef 100644
--- a/README.md
+++ b/README.md
@@ -4,13 +4,19 @@
[![Issues][issues-badge]][issues-url]
[![Gitter][gitter-badge]][gitter-url]
-# IoTHubReact
-IoTHub React is an Akka Stream library that can be used to read data from
-[Azure IoT Hub](https://azure.microsoft.com/en-us/services/iot-hub/), via a reactive stream with
-asynchronous back pressure. Azure IoT Hub is a service used to connect thousands to millions of
-devices to the Azure cloud.
+# IoTHubReact
-A simple example on how to use the library in Scala is the following:
+IoTHub React is an Akka Stream library that can be used to read data from
+[Azure IoT Hub](https://azure.microsoft.com/en-us/services/iot-hub/), via a **reactive stream** with
+**asynchronous back pressure**, and to send messages to connected devices.
+Azure IoT Hub is a service used to connect thousands to millions of devices to the Azure cloud.
+
+The library can be used both in Java and Scala, providing a fluent DSL for both programming
+languages, similarly to the approach used by Akka.
+
+The following is a simple example showing how to use the library in Scala. A stream of incoming
+telemetry data is read, parsed and converted to a `Temperature` object, and then filtered based on
+the temperature value:
```scala
IoTHub().source()
@@ -32,13 +38,10 @@ new IoTHub().source()
.run(streamMaterializer);
```
-A stream of incoming telemetry data is read, parsed and converted to a `Temperature` object and
-filtered based on the temperature value.
-
#### Streaming from IoT hub to _any_
-A more interesting example is reading telemetry data from Azure IoTHub, and sending it to a Kafka
-topic, so it can be consumed by other services downstream:
+An interesting example is reading telemetry data from Azure IoT Hub, and sending it to a Kafka
+topic, so that it can be consumed by other services downstream:
```scala
...
@@ -74,14 +77,17 @@ IoTHub().source()
### IoT hub partitions
-The library supports also reading from a single
-[IoTHub partition](https://azure.microsoft.com/en-us/documentation/articles/event-hubs-overview),
-so a service that consumes IoTHub events can create multiple streams and process them independently.
+The library supports reading from a subset of
+[partitions](https://azure.microsoft.com/en-us/documentation/articles/event-hubs-overview),
+to enable the development of distributed applications. Consider for instance the scenario of a
+client application deployed to multiple nodes, where each node independently processes a subset of
+the incoming telemetry.
```scala
-val partitionNumber = 1
+val p1 = 0
+val p2 = 3
-IoTHub.source(partitionNumber)
+IoTHub().source(PartitionList(Seq(p1, p2)))
.map(m => parse(m.contentAsString).extract[Temperature])
.filter(_.value > 100)
.to(console)
@@ -96,57 +102,84 @@ It's possible to start the stream from a given date and time too:
```scala
val start = java.time.Instant.now()
-IoTHub.source(start)
+IoTHub().source(start)
.map(m => parse(m.contentAsString).extract[Temperature])
.filter(_.value > 100)
.to(console)
.run()
```
-### Stream processing restart, saving the current position
+### Stream processing restart - saving the current position
The library provides a mechanism to restart the stream from a recent *checkpoint*, to be resilient
to restarts and crashes.
*Checkpoints* are saved automatically, with a configured frequency, on a storage provided.
-For instance, the stream position can be saved every 15 seconds, in a table in Cassandra, on Azure
-blobs, or a custom backend.
+For instance, the stream position can be saved every 15 seconds in a Cassandra table, in Azure
+blobs, or in a custom backend.
To store checkpoints in Azure blobs the configuration looks like this:
```
-iothub-checkpointing {
- enabled = true
- frequency = 15s
- countThreshold = 1000
- timeThreshold = 30s
- storage {
- rwTimeout = 5s
- backendType = "AzureBlob"
- namespace = "iothub-react-checkpoints"
- azureblob {
- lease = 15s
- useEmulator = false
- protocol = "https"
- account = "..."
- key = "..."
+iothub-react {
+
+ [... other settings ...]
+
+ checkpointing {
+ enabled = true
+ frequency = 15s
+ countThreshold = 1000
+ timeThreshold = 30s
+
+ storage {
+ rwTimeout = 5s
+ namespace = "iothub-react-checkpoints"
+
+ backendType = "AzureBlob"
+ azureblob {
+ lease = 15s
+ useEmulator = false
+ protocol = "https"
+ account = "..."
+ key = "..."
+ }
}
}
}
```
-There are some [configuration parameters](src/main/resources/reference.conf) to manage the
-checkpoint behavior, and soon it will also be possible to plug-in custom storage backends,
+Similarly, to store checkpoints in Cassandra:
+
+```
+iothub-react {
+ [...]
+ checkpointing {
+ [...]
+ storage {
+ [...]
+
+ backendType = "cassandra"
+ cassandra {
+ cluster = "localhost:9042"
+ replicationFactor = 3
+ }
+ }
+ }
+}
+```
+
+There are some [configuration settings](src/main/resources/reference.conf) to manage the
+checkpoint behavior, and in the future it will also be possible to plug in custom storage backends,
implementing a simple
[interface](src/main/scala/com/microsoft/azure/iot/iothubreact/checkpointing/Backends/CheckpointBackend.scala)
to read and write the stream position.
-There is also one API parameter to enabled/disable the mechanism, for example:
+There is also one API parameter to enable/disable the checkpointing feature, for example:
```scala
val start = java.time.Instant.now()
val withCheckpoints = false
-IoTHub.source(start, withCheckpoints)
+IoTHub().source(start, withCheckpoints)
.map(m => parse(m.contentAsString).extract[Temperature])
.filter(_.value > 100)
.to(console)
@@ -154,12 +187,13 @@ IoTHub.source(start, withCheckpoints)
```
## Build configuration
+
IoTHubReact is available on Maven Central, you just need to add the following reference in
your `build.sbt` file:
```scala
libraryDependencies ++= {
- val iothubReactV = "0.7.0"
+ val iothubReactV = "0.8.0"
Seq(
"com.microsoft.azure.iot" %% "iothub-react" % iothubReactV
@@ -172,83 +206,131 @@ or this dependency in `pom.xml` file if working with Maven:
```xml
     <groupId>com.microsoft.azure.iot</groupId>
-    <artifactId>iothub-react_2.11</artifactId>
-    <version>0.7.0</version>
+    <artifactId>iothub-react_2.12</artifactId>
+    <version>0.8.0</version>
```
### IoTHub configuration
-IoTHubReact uses a configuration file to fetch the parameters required to connect to Azure IoTHub.
+
+IoTHubReact uses a configuration file to fetch the parameters required to connect to Azure IoT Hub.
The exact values to use can be found in the [Azure Portal](https://portal.azure.com):
-* **namespace**: it is the first part of the _Event Hub-compatible endpoint_, which usually has
- this format: `sb://.servicebus.windows.net/`
-* **name**: see _Event Hub-compatible name_
-* **keyname**: usually the value is `service`
-* **key**: the Primary Key can be found under _shared access policies/service_ policy (it's a
- base64 encoded string)
+Properties required to receive device-to-cloud messages:
-The values should be stored in your `application.conf` resource (or equivalent), which can
-reference environment settings if you prefer.
+* **hubName**: see `Endpoints` ⇒ `Messaging` ⇒ `Events` ⇒ `Event Hub-compatible name`
+* **hubEndpoint**: see `Endpoints` ⇒ `Messaging` ⇒ `Events` ⇒ `Event Hub-compatible endpoint`
+* **hubPartitions**: see `Endpoints` ⇒ `Messaging` ⇒ `Events` ⇒ `Partitions`
+* **accessPolicy**: usually `service`, see `Shared access policies`
+* **accessKey**: see `Shared access policies` ⇒ `key name` ⇒ `Primary key` (it's a base64 encoded string)
+
+Properties required to send cloud-to-device messages:
+
+* **accessHostName**: see `Shared access policies` ⇒ `key name` ⇒ `Connection string` ⇒ `HostName`
+
+The values should be stored in your `application.conf` resource (or equivalent). Optionally, you
+can reference environment settings, for example to hide sensitive data.
```
-iothub {
- namespace = ""
- name = ""
- keyName = ""
- key = ""
- partitions =
+iothub-react {
+
+ connection {
+ hubName = ""
+ hubEndpoint = ""
+ hubPartitions =
+ accessPolicy = ""
+ accessKey = ""
+ accessHostName = ""
+ }
+
+ [... other settings...]
}
````
Example using environment settings:
```
-iothub {
- namespace = ${?IOTHUB_NAMESPACE}
- name = ${?IOTHUB_NAME}
- keyName = ${?IOTHUB_ACCESS_KEY_NAME}
- key = ${?IOTHUB_ACCESS_KEY_VALUE}
- partitions = ${?IOTHUB_PARTITIONS}
+iothub-react {
+
+ connection {
+ hubName = ${?IOTHUB_EVENTHUB_NAME}
+ hubEndpoint = ${?IOTHUB_EVENTHUB_ENDPOINT}
+ hubPartitions = ${?IOTHUB_EVENTHUB_PARTITIONS}
+ accessPolicy = ${?IOTHUB_ACCESS_POLICY}
+ accessKey = ${?IOTHUB_ACCESS_KEY}
+ accessHostName = ${?IOTHUB_ACCESS_HOSTNAME}
+ }
+
+ [... other settings...]
}
````
+Note that the library will automatically use these environment variables, unless overridden
+in the configuration file (all the default settings are stored in `reference.conf`).
+
+The logging level can be managed by overriding the Akka configuration, for example:
+
+```
+akka {
+ # Options: OFF, ERROR, WARNING, INFO, DEBUG
+ loglevel = "WARNING"
+}
+```
+
There are other settings, to tune performance and connection details:
-* **consumerGroup**: the
+* **streaming.consumerGroup**: the
[consumer group](https://azure.microsoft.com/en-us/documentation/articles/event-hubs-overview)
used during the connection
-* **receiverBatchSize**: the number of messages retrieved on each call to Azure IoT hub. The
+* **streaming.receiverBatchSize**: the number of messages retrieved on each call to Azure IoT hub. The
default (and maximum) value is 999.
-* **receiverTimeout**: timeout applied to calls while retrieving messages. The default value is
+* **streaming.receiverTimeout**: timeout applied to calls while retrieving messages. The default value is
3 seconds.
+* **checkpointing.enabled**: whether the checkpointing feature is enabled (see the example below)
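+
+For example, a minimal override of these settings in `application.conf` could look like this (the
+values are illustrative):
+
+```
+iothub-react {
+  streaming {
+    consumerGroup = "$Default"
+    receiverTimeout = 5s
+    receiverBatchSize = 500
+  }
+
+  checkpointing {
+    enabled = false
+  }
+}
+```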
-The complete configuration reference is available in
+The complete configuration reference (and default values) is available in
[reference.conf](src/main/resources/reference.conf).
## Samples
-The project includes 4 demos, showing some of the use cases and how IoThub React API works.
-All the demos require an instance of Azure IoT hub, with some devices, and messages.
+The project includes multiple demos, showing some of the use cases and how the IoTHub React API works.
+All the demos require an instance of Azure IoT hub, with some devices and messages.
1. **DisplayMessages** [Java]: how to stream Azure IoT hub withing a Java application, filtering
temperature values greater than 60C
-2. **OutputMessagesToConsole** [Scala]: stream all Temeprature events to console
-3. **MessagesThroughput** [Scala]: stream all IoT hub messages, showing the current speed, and
- optionally throttling the speed to 200 msg/sec
-4. **Checkpoints** [Scala]: demonstrate how the stream can be restarted without losing its position.
- The current position is stored in a Cassandra table (we suggest to run a docker container for
- the purpose of the demo, e.g. `docker run -ip 9042:9042 --rm cassandra`)
+2. **SendMessageToDevice** [Java]: how to turn on a fan when a device reports a temperature higher
+ than 22C
+3. **AllMessagesFromBeginning** [Scala]: simple example streaming all the events in the hub.
+4. **OnlyRecentMessages** [Scala]: stream all the events, starting from the current time.
+5. **OnlyTwoPartitions** [Scala]: shows how to stream events from a subset of partitions.
+6. **MultipleDestinations** [Scala]: shows how to read once and deliver events to multiple destinations.
+7. **FilterByMessageType** [Scala]: how to filter events by message type. The type must be set by
+ the device.
+8. **FilterByDeviceID** [Scala]: how to filter events by device ID. The device ID is automatically
+ set by Azure IoT SDK.
+9. **CloseStream** [Scala]: show how to close the stream
+10. **SendMessageToDevice** [Scala]: shows the API to send messages to connected devices.
+11. **PrintTemperature** [Scala]: stream all Temperature events and print data to the console.
+12. **Throughput** [Scala]: stream all events and display statistics about the throughput.
+13. **Throttling** [Scala]: throttle the incoming stream to a defined speed of events/second.
+14. **Checkpoints** [Scala]: demonstrates how the stream can be restarted without losing its position.
+ The current position is stored in a Cassandra table (we suggest running a Docker container for
+ the purpose of the demo, e.g. `docker run -ip 9042:9042 --rm cassandra`).
+15. **SendMessageToDevice** [Scala]: another example showing how to send two different messages to
+ connected devices.
We provide a [device simulator](tools/devices-simulator/README.md) in the tools section,
-which will help setting up these requirements.
+which will help simulate some devices sending sample telemetry.
When ready, you should either edit the `application.conf` configuration files
([scala](samples-scala/src/main/resources/application.conf) and
[java](samples-java/src/main/resources/application.conf))
-with your credentials, or set the corresponding global variables.
+with your credentials, or set the corresponding environment variables.
Follow the instructions in the previous section on how to set the correct values.
+The sample folders also include some scripts showing how to set up the environment variables in
+[Linux/MacOS](samples-scala/setup-env-vars.sh) and [Windows](samples-scala/setup-env-vars.bat).
+
* [`samples-scala`](samples-scala/src/main/scala):
You can use `sbt run` to run the demos (or the `run_samples.*` scripts)
* [`samples-java`](samples-java/src/main/java):
@@ -257,9 +339,8 @@ Follow the instructions in the previous section on how to set the correct values
## Future work
+* allow redefining the streaming graph at runtime, e.g. adding/removing partitions on the fly
* improve asynchronicity by using EventHub SDK async APIs
-* add Sink for Cloud2Device scenarios. `IoTHub.Sink` will allow cloud services to send messages
- to devices (via Azure IoTHub)
# Contribute Code
@@ -268,7 +349,8 @@ If you want/plan to contribute, we ask you to sign a [CLA](https://cla.microsoft
a pull-request.
If you are sending a pull request, we kindly request to check the code style with IntelliJ IDEA,
-importing the settings from `Codestyle.IntelliJ.xml`.
+importing the settings from
+[`Codestyle.IntelliJ.xml`](https://github.com/Azure/toketi-iot-tools/blob/dev/Codestyle.IntelliJ.xml).
[maven-badge]: https://img.shields.io/maven-central/v/com.microsoft.azure.iot/iothub-react_2.11.svg
diff --git a/build.sbt b/build.sbt
index 5b6ebac..4d8c381 100644
--- a/build.sbt
+++ b/build.sbt
@@ -2,41 +2,37 @@
name := "iothub-react"
organization := "com.microsoft.azure.iot"
-version := "0.7.0"
-scalaVersion := "2.11.8"
-crossScalaVersions := Seq("2.11.8", "2.12.0-RC1")
+version := "0.8.0"
+//version := "0.8.0-DEV.170106a"
-logLevel := Level.Warn // Debug|Info|Warn|Error
-scalacOptions ++= Seq("-deprecation", "-explaintypes", "-unchecked", "-feature")
+scalaVersion := "2.12.1"
+crossScalaVersions := Seq("2.11.8", "2.12.1")
libraryDependencies <++= (scalaVersion) {
scalaVersion ⇒
- val azureEventHubSDKVersion = "0.8.2"
+ val azureEventHubSDKVersion = "0.9.0"
val azureStorageSDKVersion = "4.4.0"
- val iothubClientVersion = "1.0.14"
- val scalaTestVersion = "3.0.0"
- val jacksonVersion = "2.8.3"
- val akkaStreamVersion = "2.4.11"
- val datastaxDriverVersion = "3.0.2"
- val json4sVersion = "3.4.1"
+ val iothubDeviceClientVersion = "1.0.15"
+ val iothubServiceClientVersion = "1.0.10"
+ val scalaTestVersion = "3.0.1"
+ val datastaxDriverVersion = "3.1.1"
+ val json4sVersion = "3.5.0"
+ val akkaStreamVersion = "2.4.16"
Seq(
// Library dependencies
- "com.typesafe.akka" %% "akka-stream" % akkaStreamVersion,
+ "com.microsoft.azure.iothub-java-client" % "iothub-java-service-client" % iothubServiceClientVersion,
"com.microsoft.azure" % "azure-eventhubs" % azureEventHubSDKVersion,
"com.microsoft.azure" % "azure-storage" % azureStorageSDKVersion,
"com.datastax.cassandra" % "cassandra-driver-core" % datastaxDriverVersion,
+ "com.typesafe.akka" %% "akka-stream" % akkaStreamVersion,
"org.json4s" %% "json4s-native" % json4sVersion,
"org.json4s" %% "json4s-jackson" % json4sVersion,
// Tests dependencies
"org.scalatest" %% "scalatest" % scalaTestVersion % "test",
- "com.microsoft.azure.iothub-java-client" % "iothub-java-device-client" % iothubClientVersion % "test",
-
- // Remove < % "test" > to run samples-scala against the local workspace
- "com.fasterxml.jackson.core" % "jackson-databind" % jacksonVersion % "test",
- "com.fasterxml.jackson.module" %% "jackson-module-scala" % jacksonVersion % "test"
+ "com.microsoft.azure.iothub-java-client" % "iothub-java-device-client" % iothubDeviceClientVersion % "test"
)
}
@@ -62,6 +58,20 @@ bintrayReleaseOnPublish in ThisBuild := true
// Required in Sonatype
 pomExtra :=
-    <url>https://github.com/Azure/toketi-iothubreact</url>
-    <scm><url>https://github.com/Azure/toketi-iothubreact</url></scm>
-    <developers><developer><id>microsoft</id><name>Microsoft</name></developer></developers>
+    <url>https://github.com/Azure/toketi-iothubreact</url>
+    <scm>
+      <url>https://github.com/Azure/toketi-iothubreact</url>
+    </scm>
+    <developers>
+      <developer>
+        <id>microsoft</id> <name>Microsoft</name>
+      </developer>
+    </developers>
+
+/** Misc
+ */
+logLevel := Level.Debug // Debug|Info|Warn|Error
+scalacOptions ++= Seq("-deprecation", "-explaintypes", "-unchecked", "-feature")
+showTiming := true
+fork := true
+parallelExecution := true
diff --git a/devices.json.enc b/devices.json.enc
index 58056a8..4a0da66 100644
Binary files a/devices.json.enc and b/devices.json.enc differ
diff --git a/project/build.properties b/project/build.properties
index 13d3ee7..27e88aa 100644
--- a/project/build.properties
+++ b/project/build.properties
@@ -1 +1 @@
-sbt.version = 0.13.12
\ No newline at end of file
+sbt.version=0.13.13
diff --git a/project/plugins.sbt b/project/plugins.sbt
index 8dd913f..1f09cca 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -1 +1,7 @@
+logLevel := Level.Warn
+
+// publishing dev snapshots to Bintray
addSbtPlugin("me.lessis" % "bintray-sbt" % "0.3.0")
+
+// sbt assembly
+addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.3")
diff --git a/samples-java/pom.xml b/samples-java/pom.xml
index 1f96373..1583feb 100644
--- a/samples-java/pom.xml
+++ b/samples-java/pom.xml
@@ -6,7 +6,7 @@
     <groupId>com.microsoft.azure.iot</groupId>
     <artifactId>iothub-react-demo</artifactId>
-    <version>0.7.0</version>
+    <version>0.8.0</version>
@@ -18,8 +18,8 @@
         <groupId>com.microsoft.azure.iot</groupId>
-        <artifactId>iothub-react_2.11</artifactId>
-        <version>0.7.0</version>
+        <artifactId>iothub-react_2.12</artifactId>
+        <version>0.8.0-DEV.161101a</version>
         <groupId>com.fasterxml.jackson.core</groupId>
diff --git a/samples-java/setup-env-vars.bat b/samples-java/setup-env-vars.bat
new file mode 100644
index 0000000..62dfdbd
--- /dev/null
+++ b/samples-java/setup-env-vars.bat
@@ -0,0 +1,41 @@
+:: Populate the following environment variables, and execute this file before running
+:: the samples.
+::
+:: For more information about where to find these values, see:
+::
+:: * https://docs.microsoft.com/en-us/azure/iot-hub/iot-hub-create-through-portal#endpoints
+:: * https://docs.microsoft.com/en-us/azure/iot-hub/iot-hub-java-java-getstarted
+::
+::
+:: Example:
+::
+:: SET IOTHUB_EVENTHUB_NAME="my-iothub-one"
+::
+:: SET IOTHUB_EVENTHUB_ENDPOINT="sb://iothub-ns-myioth-75186-9fb862f912.servicebus.windows.net/"
+::
+:: SET IOTHUB_EVENTHUB_PARTITIONS=4
+::
+:: SET IOTHUB_ACCESS_POLICY="service"
+::
+:: SET IOTHUB_ACCESS_KEY="6XdRSFB9H61f+N3uOdBJiKwzeqbZUj1K//T2jFyewN4="
+::
+:: SET IOTHUB_ACCESS_HOSTNAME="my-iothub-one.azure-devices.net"
+::
+
+:: see: Endpoints ⇒ Messaging ⇒ Events ⇒ "Event Hub-compatible name"
+SET IOTHUB_EVENTHUB_NAME=""
+
+:: see: Endpoints ⇒ Messaging ⇒ Events ⇒ "Event Hub-compatible endpoint"
+SET IOTHUB_EVENTHUB_ENDPOINT=""
+
+:: see: Endpoints ⇒ Messaging ⇒ Events ⇒ Partitions
+SET IOTHUB_EVENTHUB_PARTITIONS=""
+
+:: see: Shared access policies, we suggest using "service" here
+SET IOTHUB_ACCESS_POLICY=""
+
+:: see: Shared access policies ⇒ key name ⇒ Primary key
+SET IOTHUB_ACCESS_KEY=""
+
+:: see: Shared access policies ⇒ key name ⇒ Connection string ⇒ HostName
+SET IOTHUB_ACCESS_HOSTNAME=""
diff --git a/samples-java/setup-env-vars.ps1 b/samples-java/setup-env-vars.ps1
new file mode 100644
index 0000000..0e8fa7a
--- /dev/null
+++ b/samples-java/setup-env-vars.ps1
@@ -0,0 +1,41 @@
+# Populate the following environment variables, and execute this file before running
+# the samples.
+#
+# For more information about where to find these values, see:
+#
+# * https://docs.microsoft.com/en-us/azure/iot-hub/iot-hub-create-through-portal#endpoints
+# * https://docs.microsoft.com/en-us/azure/iot-hub/iot-hub-java-java-getstarted
+#
+#
+# Example:
+#
+# $env:IOTHUB_EVENTHUB_NAME = 'my-iothub-one'
+#
+# $env:IOTHUB_EVENTHUB_ENDPOINT = 'sb://iothub-ns-myioth-75186-9fb862f912.servicebus.windows.net/'
+#
+# $env:IOTHUB_EVENTHUB_PARTITIONS = 4
+#
+# $env:IOTHUB_ACCESS_POLICY = 'service'
+#
+# $env:IOTHUB_ACCESS_KEY = '6XdRSFB9H61f+N3uOdBJiKwzeqbZUj1K//T2jFyewN4='
+#
+# $env:IOTHUB_ACCESS_HOSTNAME = 'my-iothub-one.azure-devices.net'
+#
+
+# see: Endpoints ⇒ Messaging ⇒ Events ⇒ "Event Hub-compatible name"
+$env:IOTHUB_EVENTHUB_NAME = ''
+
+# see: Endpoints ⇒ Messaging ⇒ Events ⇒ "Event Hub-compatible endpoint"
+$env:IOTHUB_EVENTHUB_ENDPOINT = ''
+
+# see: Endpoints ⇒ Messaging ⇒ Events ⇒ Partitions
+$env:IOTHUB_EVENTHUB_PARTITIONS = ''
+
+# see: Shared access policies, we suggest using "service" here
+$env:IOTHUB_ACCESS_POLICY = ''
+
+# see: Shared access policies ⇒ key name ⇒ Primary key
+$env:IOTHUB_ACCESS_KEY = ''
+
+# see: Shared access policies ⇒ key name ⇒ Connection string ⇒ HostName
+$env:IOTHUB_ACCESS_HOSTNAME = ''
diff --git a/samples-java/setup-env-vars.sh b/samples-java/setup-env-vars.sh
new file mode 100644
index 0000000..b9a5717
--- /dev/null
+++ b/samples-java/setup-env-vars.sh
@@ -0,0 +1,41 @@
+# Populate the following environment variables, and execute this file before running
+# the samples.
+#
+# For more information about where to find these values, see:
+#
+# * https://docs.microsoft.com/en-us/azure/iot-hub/iot-hub-create-through-portal#endpoints
+# * https://docs.microsoft.com/en-us/azure/iot-hub/iot-hub-java-java-getstarted
+#
+#
+# Example:
+#
+# export IOTHUB_EVENTHUB_NAME="my-iothub-one"
+#
+# export IOTHUB_EVENTHUB_ENDPOINT="sb://iothub-ns-myioth-75186-9fb862f912.servicebus.windows.net/"
+#
+# export IOTHUB_EVENTHUB_PARTITIONS=4
+#
+# export IOTHUB_ACCESS_POLICY="service"
+#
+# export IOTHUB_ACCESS_KEY="6XdRSFB9H61f+N3uOdBJiKwzeqbZUj1K//T2jFyewN4="
+#
+# export IOTHUB_ACCESS_HOSTNAME="my-iothub-one.azure-devices.net"
+#
+
+# see: Endpoints ⇒ Messaging ⇒ Events ⇒ "Event Hub-compatible name"
+export IOTHUB_EVENTHUB_NAME=""
+
+# see: Endpoints ⇒ Messaging ⇒ Events ⇒ "Event Hub-compatible endpoint"
+export IOTHUB_EVENTHUB_ENDPOINT=""
+
+# see: Endpoints ⇒ Messaging ⇒ Events ⇒ Partitions
+export IOTHUB_EVENTHUB_PARTITIONS=""
+
+# see: Shared access policies, we suggest using "service" here
+export IOTHUB_ACCESS_POLICY=""
+
+# see: Shared access policies ⇒ key name ⇒ Primary key
+export IOTHUB_ACCESS_KEY=""
+
+# see: Shared access policies ⇒ key name ⇒ Connection string ⇒ HostName
+export IOTHUB_ACCESS_HOSTNAME=""
diff --git a/samples-java/src/main/java/DisplayMessages/Demo.java b/samples-java/src/main/java/DisplayMessages/Demo.java
index d49df8d..68409a1 100644
--- a/samples-java/src/main/java/DisplayMessages/Demo.java
+++ b/samples-java/src/main/java/DisplayMessages/Demo.java
@@ -7,11 +7,13 @@ import akka.NotUsed;
import akka.stream.javadsl.Sink;
import akka.stream.javadsl.Source;
import com.fasterxml.jackson.databind.ObjectMapper;
-import com.microsoft.azure.iot.iothubreact.IoTMessage;
+import com.microsoft.azure.iot.iothubreact.MessageFromDevice;
+import com.microsoft.azure.iot.iothubreact.javadsl.PartitionList;
import com.microsoft.azure.iot.iothubreact.javadsl.IoTHub;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
+import java.util.Arrays;
import java.util.Map;
import java.util.concurrent.CompletionStage;
@@ -20,44 +22,51 @@ import static java.lang.System.out;
/**
* Retrieve messages from IoT hub and display the data in the console
*/
-public class Demo extends ReactiveStreamingApp {
-
+public class Demo extends ReactiveStreamingApp
+{
static ObjectMapper jsonParser = new ObjectMapper();
- public static void main(String args[]) {
-
- // Source retrieving messages from one IoT hub partition (e.g. partition 2)
- //Source messages = new IoTHubPartition(2).source();
+ public static void main(String args[])
+ {
+ // Source retrieving messages from two IoT hub partitions (e.g. partitions 2 and 5)
+ Source<MessageFromDevice, NotUsed> messagesFromTwoPartitions = new IoTHub().source(new PartitionList(Arrays.asList(2, 5)));
// Source retrieving from all IoT hub partitions for the past 24 hours
- Source<IoTMessage, NotUsed> messages = new IoTHub().source(Instant.now().minus(1, ChronoUnit.DAYS));
+ Source<MessageFromDevice, NotUsed> messages = new IoTHub().source(Instant.now().minus(1, ChronoUnit.DAYS));
messages
- .filter(m -> m.model().equals("temperature"))
+ .filter(m -> m.messageType().equals("temperature"))
.map(m -> parseTemperature(m))
.filter(x -> x != null && (x.value < 18 || x.value > 22))
.to(console())
.run(streamMaterializer);
}
- public static Sink<Temperature, CompletionStage<Done>> console() {
- return Sink.foreach(m -> {
- if (m.value <= 18) {
+ public static Sink<Temperature, CompletionStage<Done>> console()
+ {
+ return Sink.foreach(m ->
+ {
+ if (m.value <= 18)
+ {
out.println("Device: " + m.deviceId + ": temperature too LOW: " + m.value);
- } else {
+ } else
+ {
out.println("Device: " + m.deviceId + ": temperature to HIGH: " + m.value);
}
});
}
- public static Temperature parseTemperature(IoTMessage m) {
- try {
+ public static Temperature parseTemperature(MessageFromDevice m)
+ {
+ try
+ {
 Map<String, Object> hash = jsonParser.readValue(m.contentAsString(), Map.class);
Temperature t = new Temperature();
t.value = Double.parseDouble(hash.get("value").toString());
t.deviceId = m.deviceId();
return t;
- } catch (Exception e) {
+ } catch (Exception e)
+ {
return null;
}
}
diff --git a/samples-java/src/main/java/DisplayMessages/Temperature.java b/samples-java/src/main/java/DisplayMessages/Temperature.java
index 050f7da..f1679a1 100644
--- a/samples-java/src/main/java/DisplayMessages/Temperature.java
+++ b/samples-java/src/main/java/DisplayMessages/Temperature.java
@@ -5,4 +5,5 @@ package DisplayMessages;
public class Temperature {
String deviceId;
Double value;
+ String time;
}
diff --git a/samples-java/src/main/java/SendMessageToDevice/Demo.java b/samples-java/src/main/java/SendMessageToDevice/Demo.java
new file mode 100644
index 0000000..33d6954
--- /dev/null
+++ b/samples-java/src/main/java/SendMessageToDevice/Demo.java
@@ -0,0 +1,80 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package SendMessageToDevice;
+
+import akka.Done;
+import akka.NotUsed;
+import akka.stream.javadsl.Sink;
+import akka.stream.javadsl.Source;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.microsoft.azure.iot.iothubreact.MessageFromDevice;
+import com.microsoft.azure.iot.iothubreact.MessageToDevice;
+import com.microsoft.azure.iot.iothubreact.filters.MessageType;
+import com.microsoft.azure.iot.iothubreact.javadsl.IoTHub;
+
+import java.time.Instant;
+import java.time.temporal.ChronoUnit;
+import java.util.Map;
+import java.util.concurrent.CompletionStage;
+
+import static java.lang.System.out;
+
+/**
+ * Retrieve temperature messages from IoT hub and send a command to devices reporting a high temperature
+ */
+public class Demo extends ReactiveStreamingApp
+{
+ static ObjectMapper jsonParser = new ObjectMapper();
+
+ public static void main(String args[])
+ {
+ // IoTHub
+ IoTHub hub = new IoTHub();
+
+ // Source retrieving from all IoT hub partitions for the past 24 hours
+ Source<MessageFromDevice, NotUsed> messages = hub.source(Instant.now().minus(1, ChronoUnit.DAYS));
+
+ MessageToDevice turnFanOn = new MessageToDevice("turnFanOn")
+ .addProperty("speed", "high")
+ .addProperty("duration", "60");
+
+ MessageType msgTypeFilter = new MessageType("temperature");
+
+ messages
+ .filter(m -> msgTypeFilter.filter(m))
+ .map(m -> parseTemperature(m))
+ .filter(x -> x != null && x.deviceId.equalsIgnoreCase("livingRoom") && x.value > 22)
+ .map(t -> turnFanOn.to(t.deviceId))
+ .to(hub.messageSink())
+ .run(streamMaterializer);
+ }
+
+ public static Sink<Temperature, CompletionStage<Done>> console()
+ {
+ return Sink.foreach(m ->
+ {
+ if (m.value <= 18)
+ {
+ out.println("Device: " + m.deviceId + ": temperature too LOW: " + m.value);
+ } else
+ {
+ out.println("Device: " + m.deviceId + ": temperature too HIGH: " + m.value);
+ }
+ });
+ }
+
+ public static Temperature parseTemperature(MessageFromDevice m)
+ {
+ try
+ {
+ Map<String, Object> hash = jsonParser.readValue(m.contentAsString(), Map.class);
+ Temperature t = new Temperature();
+ t.value = Double.parseDouble(hash.get("value").toString());
+ t.deviceId = m.deviceId();
+ return t;
+ } catch (Exception e)
+ {
+ return null;
+ }
+ }
+}
diff --git a/samples-java/src/main/java/SendMessageToDevice/ReactiveStreamingApp.java b/samples-java/src/main/java/SendMessageToDevice/ReactiveStreamingApp.java
new file mode 100644
index 0000000..aaa2f27
--- /dev/null
+++ b/samples-java/src/main/java/SendMessageToDevice/ReactiveStreamingApp.java
@@ -0,0 +1,18 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package SendMessageToDevice;
+
+import akka.actor.ActorSystem;
+import akka.stream.ActorMaterializer;
+import akka.stream.Materializer;
+
+/**
+ * Initialize reactive streaming
+ */
+public class ReactiveStreamingApp
+{
+
+ private static ActorSystem system = ActorSystem.create("Demo");
+
+ protected final static Materializer streamMaterializer = ActorMaterializer.create(system);
+}
diff --git a/samples-java/src/main/java/SendMessageToDevice/Temperature.java b/samples-java/src/main/java/SendMessageToDevice/Temperature.java
new file mode 100644
index 0000000..f71927a
--- /dev/null
+++ b/samples-java/src/main/java/SendMessageToDevice/Temperature.java
@@ -0,0 +1,10 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package SendMessageToDevice;
+
+public class Temperature
+{
+ String deviceId;
+ Double value;
+ String time;
+}
diff --git a/samples-java/src/main/resources/application.conf b/samples-java/src/main/resources/application.conf
index f30949c..db8715e 100644
--- a/samples-java/src/main/resources/application.conf
+++ b/samples-java/src/main/resources/application.conf
@@ -1,91 +1,97 @@
// Configuration file [HOCON format]
-// IoT Hub settings can be retrieved from the Azure portal at https://portal.azure.com
-iothub {
-
- // see: "IoT Hub" >> your hub >> "Messaging" >> "Partitions"
- partitions = ${?IOTHUB_PARTITIONS}
-
- // see: "IoT Hub" >> your hub >> "Messaging" >> "Event Hub-compatible name"
- name = ${?IOTHUB_NAME}
-
- // see: "IoT Hub" >> your hub > "Messaging" >> "Event Hub-compatible endpoint"
- // e.g. from "sb://iothub-ns-toketi-i-18552-16281e72ba.servicebus.windows.net/"
- // use "iothub-ns-toketi-i-18552-16281e72ba"
- namespace = ${?IOTHUB_NAMESPACE}
-
- // see: "IoT Hub" >> your hub >> "Shared access policies"
- // e.g. you could use the predefined "iothubowner"
- keyName = ${?IOTHUB_ACCESS_KEY_NAME}
-
- // see: "IoT Hub" >> your hub >> "Shared access policies" >> key name >> "Primary key"
- key = ${?IOTHUB_ACCESS_KEY_VALUE}
-
- // see: "IoT Hub" >> your hub > "Messaging" >> Consumer groups
- // "$Default" is predefined and is the typical scenario
- consumerGroup = "$Default"
-}
-
-iothub-stream {
- // Value expressed as a duration, e.g. 3s, 3000ms, "3 seconds", etc.
- receiverTimeout = 3s
-
- // How many messages to retrieve on each pull, max is 999
- receiverBatchSize = 999
-}
-
-// IoT hub stream checkpointing options
-iothub-checkpointing {
- // Whether the checkpointing feature is enabled
- enabled = true
-
- // Checkpoints frequency (best effort), for each IoT hub partition
- // Min: 1 second, Max: 1 minute
- frequency = 10s
-
- // How many messages to stream before saving the position, for each IoT hub partition.
- // Since the stream position is saved in the Source, before the rest of the
- // Graph (Flows/Sinks), this provides a mechanism to replay buffered messages.
- // The value should be greater than receiverBatchSize
- countThreshold = 5
-
- // Store a position if its value is older than this amount of time, ignoring the threshold.
- // For instance when the telemetry stops, this will force to write the last offset after some time.
- // Min: 1 second, Max: 1 hour. Value is rounded to seconds.
- timeThreshold = 30s
-
- storage {
-
- // Value expressed as a duration, e.g. 3s, 3000ms, "3 seconds", etc.
- rwTimeout = 5s
-
- backendType = "Cassandra"
-
- // If you use the same storage while processing multiple streams, you'll want
- // to use a distinct table/container/path in each job, to to keep state isolated
- namespace = "iothub-react-checkpoints"
-
- azureblob {
- // Time allowed for a checkpoint to be written, rounded to seconds (min 15, max 60)
- lease = 15s
- // Whether to use the Azure Storage Emulator
- useEmulator = false
- // Storage credentials
- protocol = "https"
- account = ${?IOTHUB_CHECKPOINT_ACCOUNT}
- key = ${?IOTHUB_CHECKPOINT_KEY}
- }
-
- // You can easily test this with Docker --> docker run -ip 9042:9042 --rm cassandra
- cassandra {
- cluster = "localhost:9042"
- replicationFactor = 1
- }
- }
-}
-
// @see http://doc.akka.io/docs/akka/2.4.10/scala/logging.html
akka {
# Options: OFF, ERROR, WARNING, INFO, DEBUG
- loglevel = "WARNING"
+ loglevel = "INFO"
+}
+
+iothub-react {
+
+ // Connection settings can be retrieved from the Azure portal at https://portal.azure.com
+ // For more information about IoT Hub settings, see:
+ // https://docs.microsoft.com/en-us/azure/iot-hub/iot-hub-create-through-portal#endpoints
+ // https://docs.microsoft.com/en-us/azure/iot-hub/iot-hub-java-java-getstarted
+ connection {
+ // see: Endpoints ⇒ Messaging ⇒ Events ⇒ "Event Hub-compatible name"
+ hubName = ${?IOTHUB_EVENTHUB_NAME}
+
+ // see: Endpoints ⇒ Messaging ⇒ Events ⇒ "Event Hub-compatible endpoint"
+ hubEndpoint = ${?IOTHUB_EVENTHUB_ENDPOINT}
+
+ // see: Endpoints ⇒ Messaging ⇒ Events ⇒ Partitions
+ hubPartitions = ${?IOTHUB_EVENTHUB_PARTITIONS}
+
+ // see: "IoT Hub" ⇒ your hub ⇒ "Shared access policies"
+ // e.g. you should use the predefined "service" policy
+ accessPolicy = ${?IOTHUB_ACCESS_POLICY}
+
+ // see: Shared access policies ⇒ key name ⇒ Primary key
+ accessKey = ${?IOTHUB_ACCESS_KEY}
+
+ // see: Shared access policies ⇒ key name ⇒ Connection string ⇒ "HostName"
+ accessHostName = ${?IOTHUB_ACCESS_HOSTNAME}
+ }
+
+ streaming {
+ // see: "IoT Hub" >> your hub > "Messaging" >> Consumer groups
+ // "$Default" is predefined and is the typical scenario
+ consumerGroup = "$Default"
+
+ // Value expressed as a duration, e.g. 3s, 3000ms, "3 seconds", etc.
+ receiverTimeout = 3s
+
+ // How many messages to retrieve on each pull, max is 999
+ receiverBatchSize = 999
+ }
+
+ checkpointing {
+ // Whether the checkpointing feature is enabled
+ enabled = true
+
+ // Checkpoints frequency (best effort), for each IoT hub partition
+ // Min: 1 second, Max: 1 minute
+ frequency = 10s
+
+ // How many messages to stream before saving the position, for each IoT hub partition.
+ // Since the stream position is saved in the Source, before the rest of the
+ // Graph (Flows/Sinks), this provides a mechanism to replay buffered messages.
+ // The value should be greater than receiverBatchSize
+ countThreshold = 5
+
+ // Store a position if its value is older than this amount of time, ignoring the threshold.
+ // For instance when the telemetry stops, this will force to write the last offset after some time.
+ // Min: 1 second, Max: 1 hour. Value is rounded to seconds.
+ timeThreshold = 30s
+
+ storage {
+
+ // Value expressed as a duration, e.g. 3s, 3000ms, "3 seconds", etc.
+ rwTimeout = 5s
+
+ // Supported types (not case sensitive): Cassandra, AzureBlob
+ backendType = "Cassandra"
+
+ // If you use the same storage while processing multiple streams, you'll want
+ // to use a distinct table/container/path in each job, to keep state isolated
+ namespace = "iothub-react-checkpoints"
+
+ azureblob {
+ // Time allowed for a checkpoint to be written, rounded to seconds (min 15, max 60)
+ lease = 15s
+ // Whether to use the Azure Storage Emulator
+ useEmulator = false
+ // Storage credentials
+ protocol = "https"
+ account = ${?IOTHUB_CHECKPOINT_ACCOUNT}
+ key = ${?IOTHUB_CHECKPOINT_KEY}
+ }
+
+ // You can easily test this with Docker --> docker run -ip 9042:9042 --rm cassandra
+ cassandra {
+ cluster = "localhost:9042"
+ replicationFactor = 1
+ }
+ }
+ }
}
diff --git a/samples-scala/build.sbt b/samples-scala/build.sbt
index 441245d..12d7b84 100644
--- a/samples-scala/build.sbt
+++ b/samples-scala/build.sbt
@@ -1,7 +1,7 @@
// Copyright (c) Microsoft. All rights reserved.
-scalaVersion := "2.11.8"
-crossScalaVersions := Seq("2.11.8", "2.12.0-RC1")
+scalaVersion := "2.12.0"
+crossScalaVersions := Seq("2.11.8", "2.12.0")
scalacOptions ++= Seq("-deprecation", "-explaintypes", "-unchecked", "-feature")
@@ -10,9 +10,9 @@ resolvers += "Dev Snapshots" at "https://dl.bintray.com/microsoftazuretoketi/tok
libraryDependencies ++= {
val prodVersion = "0.7.0"
- val devVersion = "0.7.0-DEV.161025c"
+ val devVersion = "0.8.0-DEV.161101a"
Seq(
- "com.microsoft.azure.iot" %% "iothub-react" % prodVersion
+ "com.microsoft.azure.iot" %% "iothub-react" % devVersion
)
}
diff --git a/samples-scala/setup-env-vars.bat b/samples-scala/setup-env-vars.bat
new file mode 100644
index 0000000..62dfdbd
--- /dev/null
+++ b/samples-scala/setup-env-vars.bat
@@ -0,0 +1,41 @@
+:: Populate the following environment variables, and execute this file before running
+:: the samples.
+::
+:: For more information about where to find these values, see:
+::
+:: * https://docs.microsoft.com/en-us/azure/iot-hub/iot-hub-create-through-portal#endpoints
+:: * https://docs.microsoft.com/en-us/azure/iot-hub/iot-hub-java-java-getstarted
+::
+::
+:: Example:
+::
+:: SET IOTHUB_EVENTHUB_NAME="my-iothub-one"
+::
+:: SET IOTHUB_EVENTHUB_ENDPOINT="sb://iothub-ns-myioth-75186-9fb862f912.servicebus.windows.net/"
+::
+:: SET IOTHUB_EVENTHUB_PARTITIONS=4
+::
+:: SET IOTHUB_ACCESS_POLICY="service"
+::
+:: SET IOTHUB_ACCESS_KEY="6XdRSFB9H61f+N3uOdBJiKwzeqbZUj1K//T2jFyewN4="
+::
+:: SET IOTHUB_ACCESS_HOSTNAME="my-iothub-one.azure-devices.net"
+::
+
+:: see: Endpoints ⇒ Messaging ⇒ Events ⇒ "Event Hub-compatible name"
+SET IOTHUB_EVENTHUB_NAME=""
+
+:: see: Endpoints ⇒ Messaging ⇒ Events ⇒ "Event Hub-compatible endpoint"
+SET IOTHUB_EVENTHUB_ENDPOINT=""
+
+:: see: Endpoints ⇒ Messaging ⇒ Events ⇒ Partitions
+SET IOTHUB_EVENTHUB_PARTITIONS=""
+
+:: see: Shared access policies, we suggest using "service" here
+SET IOTHUB_ACCESS_POLICY=""
+
+:: see: Shared access policies ⇒ key name ⇒ Primary key
+SET IOTHUB_ACCESS_KEY=""
+
+:: see: Shared access policies ⇒ key name ⇒ Connection string ⇒ HostName
+SET IOTHUB_ACCESS_HOSTNAME=""
diff --git a/samples-scala/setup-env-vars.ps1 b/samples-scala/setup-env-vars.ps1
new file mode 100644
index 0000000..0e8fa7a
--- /dev/null
+++ b/samples-scala/setup-env-vars.ps1
@@ -0,0 +1,41 @@
+# Populate the following environment variables, and execute this file before running
+# the samples.
+#
+# For more information about where to find these values, see:
+#
+# * https://docs.microsoft.com/en-us/azure/iot-hub/iot-hub-create-through-portal#endpoints
+# * https://docs.microsoft.com/en-us/azure/iot-hub/iot-hub-java-java-getstarted
+#
+#
+# Example:
+#
+# $env:IOTHUB_EVENTHUB_NAME = 'my-iothub-one'
+#
+# $env:IOTHUB_EVENTHUB_ENDPOINT = 'sb://iothub-ns-myioth-75186-9fb862f912.servicebus.windows.net/'
+#
+# $env:IOTHUB_EVENTHUB_PARTITIONS = 4
+#
+# $env:IOTHUB_ACCESS_POLICY = 'service'
+#
+# $env:IOTHUB_ACCESS_KEY = '6XdRSFB9H61f+N3uOdBJiKwzeqbZUj1K//T2jFyewN4='
+#
+# $env:IOTHUB_ACCESS_HOSTNAME = 'my-iothub-one.azure-devices.net'
+#
+
+# see: Endpoints ⇒ Messaging ⇒ Events ⇒ "Event Hub-compatible name"
+$env:IOTHUB_EVENTHUB_NAME = ''
+
+# see: Endpoints ⇒ Messaging ⇒ Events ⇒ "Event Hub-compatible endpoint"
+$env:IOTHUB_EVENTHUB_ENDPOINT = ''
+
+# see: Endpoints ⇒ Messaging ⇒ Events ⇒ Partitions
+$env:IOTHUB_EVENTHUB_PARTITIONS = ''
+
+# see: Shared access policies, we suggest using "service" here
+$env:IOTHUB_ACCESS_POLICY = ''
+
+# see: Shared access policies ⇒ key name ⇒ Primary key
+$env:IOTHUB_ACCESS_KEY = ''
+
+# see: Shared access policies ⇒ key name ⇒ Connection string ⇒ HostName
+$env:IOTHUB_ACCESS_HOSTNAME = ''
diff --git a/samples-scala/setup-env-vars.sh b/samples-scala/setup-env-vars.sh
new file mode 100644
index 0000000..b9a5717
--- /dev/null
+++ b/samples-scala/setup-env-vars.sh
@@ -0,0 +1,41 @@
+# Populate the following environment variables, and execute this file before running
+# the samples.
+#
+# For more information about where to find these values, see:
+#
+# * https://docs.microsoft.com/en-us/azure/iot-hub/iot-hub-create-through-portal#endpoints
+# * https://docs.microsoft.com/en-us/azure/iot-hub/iot-hub-java-java-getstarted
+#
+#
+# Example:
+#
+# export IOTHUB_EVENTHUB_NAME="my-iothub-one"
+#
+# export IOTHUB_EVENTHUB_ENDPOINT="sb://iothub-ns-myioth-75186-9fb862f912.servicebus.windows.net/"
+#
+# export IOTHUB_EVENTHUB_PARTITIONS=4
+#
+# export IOTHUB_ACCESS_POLICY="service"
+#
+# export IOTHUB_ACCESS_KEY="6XdRSFB9H61f+N3uOdBJiKwzeqbZUj1K//T2jFyewN4="
+#
+# export IOTHUB_ACCESS_HOSTNAME="my-iothub-one.azure-devices.net"
+#
+
+# see: Endpoints ⇒ Messaging ⇒ Events ⇒ "Event Hub-compatible name"
+export IOTHUB_EVENTHUB_NAME=""
+
+# see: Endpoints ⇒ Messaging ⇒ Events ⇒ "Event Hub-compatible endpoint"
+export IOTHUB_EVENTHUB_ENDPOINT=""
+
+# see: Endpoints ⇒ Messaging ⇒ Events ⇒ Partitions
+export IOTHUB_EVENTHUB_PARTITIONS=""
+
+# see: Shared access policies, we suggest using "service" here
+export IOTHUB_ACCESS_POLICY=""
+
+# see: Shared access policies ⇒ key name ⇒ Primary key
+export IOTHUB_ACCESS_KEY=""
+
+# see: Shared access policies ⇒ key name ⇒ Connection string ⇒ HostName
+export IOTHUB_ACCESS_HOSTNAME=""
diff --git a/samples-scala/src/main/resources/application.conf b/samples-scala/src/main/resources/application.conf
index f30949c..51ef0df 100644
--- a/samples-scala/src/main/resources/application.conf
+++ b/samples-scala/src/main/resources/application.conf
@@ -1,91 +1,97 @@
// Configuration file [HOCON format]
-// IoT Hub settings can be retrieved from the Azure portal at https://portal.azure.com
-iothub {
-
- // see: "IoT Hub" >> your hub >> "Messaging" >> "Partitions"
- partitions = ${?IOTHUB_PARTITIONS}
-
- // see: "IoT Hub" >> your hub >> "Messaging" >> "Event Hub-compatible name"
- name = ${?IOTHUB_NAME}
-
- // see: "IoT Hub" >> your hub > "Messaging" >> "Event Hub-compatible endpoint"
- // e.g. from "sb://iothub-ns-toketi-i-18552-16281e72ba.servicebus.windows.net/"
- // use "iothub-ns-toketi-i-18552-16281e72ba"
- namespace = ${?IOTHUB_NAMESPACE}
-
- // see: "IoT Hub" >> your hub >> "Shared access policies"
- // e.g. you could use the predefined "iothubowner"
- keyName = ${?IOTHUB_ACCESS_KEY_NAME}
-
- // see: "IoT Hub" >> your hub >> "Shared access policies" >> key name >> "Primary key"
- key = ${?IOTHUB_ACCESS_KEY_VALUE}
-
- // see: "IoT Hub" >> your hub > "Messaging" >> Consumer groups
- // "$Default" is predefined and is the typical scenario
- consumerGroup = "$Default"
-}
-
-iothub-stream {
- // Value expressed as a duration, e.g. 3s, 3000ms, "3 seconds", etc.
- receiverTimeout = 3s
-
- // How many messages to retrieve on each pull, max is 999
- receiverBatchSize = 999
-}
-
-// IoT hub stream checkpointing options
-iothub-checkpointing {
- // Whether the checkpointing feature is enabled
- enabled = true
-
- // Checkpoints frequency (best effort), for each IoT hub partition
- // Min: 1 second, Max: 1 minute
- frequency = 10s
-
- // How many messages to stream before saving the position, for each IoT hub partition.
- // Since the stream position is saved in the Source, before the rest of the
- // Graph (Flows/Sinks), this provides a mechanism to replay buffered messages.
- // The value should be greater than receiverBatchSize
- countThreshold = 5
-
- // Store a position if its value is older than this amount of time, ignoring the threshold.
- // For instance when the telemetry stops, this will force to write the last offset after some time.
- // Min: 1 second, Max: 1 hour. Value is rounded to seconds.
- timeThreshold = 30s
-
- storage {
-
- // Value expressed as a duration, e.g. 3s, 3000ms, "3 seconds", etc.
- rwTimeout = 5s
-
- backendType = "Cassandra"
-
- // If you use the same storage while processing multiple streams, you'll want
- // to use a distinct table/container/path in each job, to to keep state isolated
- namespace = "iothub-react-checkpoints"
-
- azureblob {
- // Time allowed for a checkpoint to be written, rounded to seconds (min 15, max 60)
- lease = 15s
- // Whether to use the Azure Storage Emulator
- useEmulator = false
- // Storage credentials
- protocol = "https"
- account = ${?IOTHUB_CHECKPOINT_ACCOUNT}
- key = ${?IOTHUB_CHECKPOINT_KEY}
- }
-
- // You can easily test this with Docker --> docker run -ip 9042:9042 --rm cassandra
- cassandra {
- cluster = "localhost:9042"
- replicationFactor = 1
- }
- }
-}
-
// @see http://doc.akka.io/docs/akka/2.4.10/scala/logging.html
akka {
# Options: OFF, ERROR, WARNING, INFO, DEBUG
- loglevel = "WARNING"
+ loglevel = "INFO"
+}
+
+iothub-react {
+
+ // Connection settings can be retrieved from the Azure portal at https://portal.azure.com
+ // For more information about IoT Hub settings, see:
+ // https://docs.microsoft.com/en-us/azure/iot-hub/iot-hub-create-through-portal#endpoints
+ // https://docs.microsoft.com/en-us/azure/iot-hub/iot-hub-java-java-getstarted
+ connection {
+ // see: Endpoints ⇒ Messaging ⇒ Events ⇒ "Event Hub-compatible name"
+ hubName = ${?IOTHUB_EVENTHUB_NAME}
+
+ // see: Endpoints ⇒ Messaging ⇒ Events ⇒ "Event Hub-compatible endpoint"
+ hubEndpoint = ${?IOTHUB_EVENTHUB_ENDPOINT}
+
+ // see: Endpoints ⇒ Messaging ⇒ Events ⇒ Partitions
+ hubPartitions = ${?IOTHUB_EVENTHUB_PARTITIONS}
+
+ // see: Shared access policies
+ // e.g. you should use the predefined "service" policy
+ accessPolicy = ${?IOTHUB_ACCESS_POLICY}
+
+ // see: Shared access policies ⇒ key name ⇒ Primary key
+ accessKey = ${?IOTHUB_ACCESS_KEY}
+
+ // see: Shared access policies ⇒ key name ⇒ Connection string ⇒ "HostName"
+ accessHostName = ${?IOTHUB_ACCESS_HOSTNAME}
+ }
+
+ streaming {
+ // see: "IoT Hub" >> your hub > "Messaging" >> Consumer groups
+ // "$Default" is predefined and is the typical scenario
+ consumerGroup = "$Default"
+
+ // Value expressed as a duration, e.g. 3s, 3000ms, "3 seconds", etc.
+ receiverTimeout = 3s
+
+ // How many messages to retrieve on each pull, max is 999
+ receiverBatchSize = 999
+ }
+
+ checkpointing {
+ // Whether the checkpointing feature is enabled
+ enabled = true
+
+ // Checkpoints frequency (best effort), for each IoT hub partition
+ // Min: 1 second, Max: 1 minute
+ frequency = 5s
+
+ // How many messages to stream before saving the position, for each IoT hub partition.
+ // Since the stream position is saved in the Source, before the rest of the
+ // Graph (Flows/Sinks), this provides a mechanism to replay buffered messages.
+ // The value should be greater than receiverBatchSize
+ countThreshold = 5
+
+ // Store a position if its value is older than this amount of time, ignoring the threshold.
+ // For instance when the telemetry stops, this will force to write the last offset after some time.
+ // Min: 1 second, Max: 1 hour. Value is rounded to seconds.
+ timeThreshold = 10s
+
+ storage {
+
+ // Value expressed as a duration, e.g. 3s, 3000ms, "3 seconds", etc.
+ rwTimeout = 5s
+
+ // Supported types (not case sensitive): Cassandra, AzureBlob
+ backendType = "Cassandra"
+
+ // If you use the same storage while processing multiple streams, you'll want
+ // to use a distinct table/container/path in each job, to keep state isolated
+ namespace = "iothub-react-checkpoints"
+
+ azureblob {
+ // Time allowed for a checkpoint to be written, rounded to seconds (min 15, max 60)
+ lease = 15s
+ // Whether to use the Azure Storage Emulator
+ useEmulator = false
+ // Storage credentials
+ protocol = "https"
+ account = ${?IOTHUB_CHECKPOINT_ACCOUNT}
+ key = ${?IOTHUB_CHECKPOINT_KEY}
+ }
+
+ // You can easily test this with Docker --> docker run -ip 9042:9042 --rm cassandra
+ cassandra {
+ cluster = "localhost:9042"
+ replicationFactor = 1
+ }
+ }
+ }
}
diff --git a/samples-scala/src/main/scala/A_APIUSage/Demo.scala b/samples-scala/src/main/scala/A_APIUSage/Demo.scala
new file mode 100644
index 0000000..e2fe464
--- /dev/null
+++ b/samples-scala/src/main/scala/A_APIUSage/Demo.scala
@@ -0,0 +1,188 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package A_APIUSage
+
+import java.time.Instant
+
+import akka.stream.scaladsl.Sink
+import com.microsoft.azure.iot.iothubreact.ResumeOnError._
+import com.microsoft.azure.iot.iothubreact.filters._
+import com.microsoft.azure.iot.iothubreact.scaladsl._
+import com.microsoft.azure.iot.iothubreact.{MessageFromDevice, MessageToDevice}
+import com.microsoft.azure.iot.service.sdk.DeliveryAcknowledgement
+
+import scala.concurrent.ExecutionContext.Implicits.global
+import scala.concurrent.duration._
+import scala.language.{implicitConversions, postfixOps}
+
+/** Stream all messages from beginning
+ */
+object AllMessagesFromBeginning extends App {
+
+ println("Streaming all the messages")
+
+ val messages = IoTHub().source()
+
+ val console = Sink.foreach[MessageFromDevice] {
+ m ⇒ println(s"${m.created} - ${m.deviceId} - ${m.messageType} - ${m.contentAsString}")
+ }
+
+ messages
+
+ .to(console)
+
+ .run()
+}
+
+/** Stream recent messages
+ */
+object OnlyRecentMessages extends App {
+
+ println("Streaming recent messages")
+
+ val messages = IoTHub().source(java.time.Instant.now())
+
+ val console = Sink.foreach[MessageFromDevice] {
+ m ⇒ println(s"${m.created} - ${m.deviceId} - ${m.messageType} - ${m.contentAsString}")
+ }
+
+ messages
+
+ .to(console)
+
+ .run()
+}
+
+/** Stream only from partitions 0 and 3
+ */
+object OnlyTwoPartitions extends App {
+
+ val Partition1 = 0
+ val Partition2 = 3
+
+ println(s"Streaming messages from partitions ${Partition1} and ${Partition2}")
+
+ val messages = IoTHub().source(PartitionList(Seq(Partition1, Partition2)))
+
+ val console = Sink.foreach[MessageFromDevice] {
+ m ⇒ println(s"${m.created} - ${m.deviceId} - ${m.messageType} - ${m.contentAsString}")
+ }
+
+ messages
+
+ .to(console)
+
+ .run()
+}
+
+/** Stream to 2 different consoles
+ */
+object MultipleDestinations extends App {
+
+ println("Streaming to two different consoles")
+
+ val messages = IoTHub().source(java.time.Instant.now())
+
+ val console1 = Sink.foreach[MessageFromDevice] {
+ m ⇒ if (m.messageType == "temperature") println(s"Temperature console: ${m.created} - ${m.deviceId} - ${m.contentAsString}")
+ }
+
+ val console2 = Sink.foreach[MessageFromDevice] {
+ m ⇒ if (m.messageType == "humidity") println(s"Humidity console: ${m.created} - ${m.deviceId} - ${m.contentAsString}")
+ }
+
+ messages
+
+ .alsoTo(console1)
+
+ .to(console2)
+
+ .run()
+}
+
+/** Stream only temperature messages
+ */
+object FilterByMessageType extends App {
+
+ println("Streaming only temperature messages")
+
+ val messages = IoTHub().source()
+
+ val console = Sink.foreach[MessageFromDevice] {
+ m ⇒ println(s"${m.created} - ${m.deviceId} - ${m.messageType} - ${m.contentAsString}")
+ }
+
+ messages
+
+ .filter(MessageType("temperature")) // Equivalent to: m ⇒ m.messageType == "temperature"
+
+ .to(console)
+
+ .run()
+}
+
+/** Stream only messages from "device1000"
+ */
+object FilterByDeviceID extends App {
+
+ val DeviceID = "device1000"
+
+ println(s"Streaming only messages from ${DeviceID}")
+
+ val messages = IoTHub().source()
+
+ val console = Sink.foreach[MessageFromDevice] {
+ m ⇒ println(s"${m.created} - ${m.deviceId} - ${m.messageType} - ${m.contentAsString}")
+ }
+
+ messages
+ .filter(Device(DeviceID)) // Equivalent to: m ⇒ m.deviceId == DeviceID
+
+ .to(console)
+
+ .run()
+}
+
+/** Show how to close the stream, terminating the connections to Azure IoT hub
+ */
+object CloseStream extends App {
+
+ println("Streaming all the messages, will stop in 5 seconds")
+
+ implicit val system = akka.actor.ActorSystem("system")
+
+ system.scheduler.scheduleOnce(5 seconds) {
+ hub.close()
+ }
+
+ val hub = IoTHub()
+ val messages = hub.source()
+
+ val console = Sink.foreach[MessageFromDevice] {
+ m ⇒ println(s"${m.created} - ${m.deviceId} - ${m.messageType} - ${m.contentAsString}")
+ }
+
+ messages.to(console).run()
+}
+
+/** Send a message to a device
+ */
+object SendMessageToDevice extends App with Deserialize {
+
+ val message = MessageToDevice("Turn fan ON")
+ .addProperty("speed", "high")
+ .addProperty("duration", "60")
+ .expiry(Instant.now().plusSeconds(30))
+ .ack(DeliveryAcknowledgement.Full)
+
+ val hub = IoTHub()
+
+ hub
+ .source(java.time.Instant.now())
+ .filter(MessageType("temperature"))
+ .map(deserialize)
+ .filter(_.value > 15)
+ .map(t ⇒ message.to(t.deviceId))
+ .to(hub.sink())
+ .run()
+}
diff --git a/samples-scala/src/main/scala/A_APIUSage/Deserialize.scala b/samples-scala/src/main/scala/A_APIUSage/Deserialize.scala
new file mode 100644
index 0000000..305f84c
--- /dev/null
+++ b/samples-scala/src/main/scala/A_APIUSage/Deserialize.scala
@@ -0,0 +1,16 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package A_APIUSage
+
+import com.microsoft.azure.iot.iothubreact.MessageFromDevice
+import org.json4s.DefaultFormats
+import org.json4s.jackson.JsonMethods.parse
+
+trait Deserialize {
+ def deserialize(m: MessageFromDevice): Temperature = {
+ implicit val formats = DefaultFormats
+ val temperature = parse(m.contentAsString).extract[Temperature]
+ temperature.deviceId = m.deviceId
+ temperature
+ }
+}
diff --git a/samples-scala/src/main/scala/A_OutputMessagesToConsole/ISO8601DateTime.scala b/samples-scala/src/main/scala/A_APIUSage/ISO8601DateTime.scala
similarity index 96%
rename from samples-scala/src/main/scala/A_OutputMessagesToConsole/ISO8601DateTime.scala
rename to samples-scala/src/main/scala/A_APIUSage/ISO8601DateTime.scala
index 99ffb25..7f9d2f3 100644
--- a/samples-scala/src/main/scala/A_OutputMessagesToConsole/ISO8601DateTime.scala
+++ b/samples-scala/src/main/scala/A_APIUSage/ISO8601DateTime.scala
@@ -1,6 +1,6 @@
// Copyright (c) Microsoft. All rights reserved.
-package A_OutputMessagesToConsole
+package A_APIUSage
import java.time.{ZoneId, ZonedDateTime}
diff --git a/samples-scala/src/main/scala/A_APIUSage/Temperature.scala b/samples-scala/src/main/scala/A_APIUSage/Temperature.scala
new file mode 100644
index 0000000..f69cf43
--- /dev/null
+++ b/samples-scala/src/main/scala/A_APIUSage/Temperature.scala
@@ -0,0 +1,15 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package A_APIUSage
+
+/** Temperature measured by a device
+ *
+ * @param value Temperature value measured by the device
+ * @param time Time (as a string) when the device measured the temperature
+ */
+case class Temperature(value: Float, time: String) {
+
+ var deviceId: String = ""
+
+ val datetime = ISO8601DateTime(time)
+}
diff --git a/samples-scala/src/main/scala/A_OutputMessagesToConsole/Demo.scala b/samples-scala/src/main/scala/A_OutputMessagesToConsole/Demo.scala
deleted file mode 100644
index e85a4c7..0000000
--- a/samples-scala/src/main/scala/A_OutputMessagesToConsole/Demo.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-package A_OutputMessagesToConsole
-
-import akka.stream.scaladsl.Sink
-import com.microsoft.azure.iot.iothubreact.filters.Model
-import com.microsoft.azure.iot.iothubreact.scaladsl.{IoTHub, IoTHubPartition}
-import org.json4s._
-import org.json4s.jackson.JsonMethods._
-import com.microsoft.azure.iot.iothubreact.ResumeOnError._
-
-import scala.language.{implicitConversions, postfixOps}
-
-/** Retrieve messages from IoT hub and display the data sent from Temperature devices
- */
-object Demo extends App {
-
- // Source retrieving messages from one IoT hub partition (e.g. partition 2)
- //val messagesFromOnePartition = IoTHubPartition(2).source()
-
- // Source retrieving only recent messages
- //val messagesFromNowOn = IoTHub().source(java.time.Instant.now())
-
- // Source retrieving messages from all IoT hub partitions
- val messagesFromAllPartitions = IoTHub().source()
-
- // Sink printing to the console
- val console = Sink.foreach[Temperature] {
- t ⇒ println(s"Device ${t.deviceId}: temperature: ${t.value}C ; T=${t.datetime}")
- }
-
- // JSON parser setup, brings in default date formats etc.
- implicit val formats = DefaultFormats
-
- // Stream
- messagesFromAllPartitions
- .filter(Model("temperature"))
- .map(m ⇒ {
- val temperature = parse(m.contentAsString).extract[Temperature]
- temperature.deviceId = m.deviceId
- temperature
- })
- .to(console)
- .run()
-}
diff --git a/samples-scala/src/main/scala/B_MessagesThroughput/Demo.scala b/samples-scala/src/main/scala/B_MessagesThroughput/Demo.scala
deleted file mode 100644
index 71479c9..0000000
--- a/samples-scala/src/main/scala/B_MessagesThroughput/Demo.scala
+++ /dev/null
@@ -1,65 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-package B_MessagesThroughput
-
-import akka.stream.ThrottleMode
-import akka.stream.scaladsl.{Flow, Sink}
-import com.microsoft.azure.iot.iothubreact.IoTMessage
-import com.microsoft.azure.iot.iothubreact.scaladsl.IoTHub
-import com.microsoft.azure.iot.iothubreact.ResumeOnError._
-
-import scala.concurrent.duration._
-import scala.language.postfixOps
-
-/** Retrieve messages from IoT hub managing the stream velocity
- *
- * Demo showing how to:
- * - Measure the streaming throughput
- * - Traffic shaping by throttling the stream speed
- * - How to combine multiple destinations
- * - Back pressure
- */
-object Demo extends App {
-
- // Maximum speed allowed
- val maxSpeed = 200
-
- val showStatsEvery = 1 second
-
- print(s"Do you want to test throttling (${maxSpeed} msg/sec) ? [y/N] ")
- val input = scala.io.StdIn.readLine()
- val throttling = input.size > 0 && input(0).toUpper == 'Y'
-
- // Stream throttling sink
- val throttler = Flow[IoTMessage]
- .throttle(maxSpeed, 1.second, maxSpeed / 10, ThrottleMode.Shaping)
- .to(Sink.ignore)
-
- // Messages throughput monitoring sink
- val monitor = Sink.foreach[IoTMessage] {
- m ⇒ {
- Monitoring.total += 1
- Monitoring.totals(m.partition.get) += 1
- }
- }
-
- // Sink combining throttling and monitoring
- val throttleAndMonitor = Flow[IoTMessage]
- .alsoTo(throttler)
- // .alsoTo(...) // Using alsoTo it's possible to deliver to multiple destinations
- .to(monitor)
-
- // Start processing the stream
- if (throttling) {
- IoTHub().source
- .to(throttleAndMonitor)
- .run()
- } else {
- IoTHub().source
- .to(monitor)
- .run()
- }
-
- // Print statistics at some interval
- Monitoring.printStatisticsWithFrequency(showStatsEvery)
-}
diff --git a/samples-scala/src/main/scala/B_PrintTemperature/Demo.scala b/samples-scala/src/main/scala/B_PrintTemperature/Demo.scala
new file mode 100644
index 0000000..e859acf
--- /dev/null
+++ b/samples-scala/src/main/scala/B_PrintTemperature/Demo.scala
@@ -0,0 +1,51 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package B_PrintTemperature
+
+import akka.stream.scaladsl.Sink
+import com.microsoft.azure.iot.iothubreact.MessageFromDevice
+import com.microsoft.azure.iot.iothubreact.ResumeOnError._
+import com.microsoft.azure.iot.iothubreact.filters.MessageType
+import com.microsoft.azure.iot.iothubreact.scaladsl._
+import org.json4s._
+import org.json4s.jackson.JsonMethods._
+
+import scala.language.{implicitConversions, postfixOps}
+
+/** Retrieve messages from IoT hub and display the data sent from Temperature devices
+ *
+ * Show how to deserialize content (JSON)
+ */
+object Demo extends App {
+
+ def deserialize(m: MessageFromDevice): Temperature = {
+
+ // JSON parser setup, brings in default date formats etc.
+ implicit val formats = DefaultFormats
+
+ val temperature = parse(m.contentAsString).extract[Temperature]
+ temperature.deviceId = m.deviceId
+ temperature
+ }
+
+ val messages = IoTHub().source()
+
+ // Sink printing to the console
+ val console = Sink.foreach[Temperature] {
+ t ⇒ println(s"Device ${t.deviceId}: temperature: ${t.value}C ; T=${t.datetime}")
+ }
+
+ // Stream
+ messages
+
+ // Equivalent to: m ⇒ m.messageType == "temperature"
+ .filter(MessageType("temperature"))
+
+ // Deserialize JSON
+ .map(m ⇒ deserialize(m))
+
+ // Send Temperature object to the console sink
+ .to(console)
+ .run()
+}
+
diff --git a/samples-scala/src/main/scala/B_PrintTemperature/ISO8601DateTime.scala b/samples-scala/src/main/scala/B_PrintTemperature/ISO8601DateTime.scala
new file mode 100644
index 0000000..526e34e
--- /dev/null
+++ b/samples-scala/src/main/scala/B_PrintTemperature/ISO8601DateTime.scala
@@ -0,0 +1,26 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package B_PrintTemperature
+
+import java.time.{ZoneId, ZonedDateTime}
+
+/** ISO8601 with and without milliseconds decimals
+ *
+ * @param text String date
+ */
+case class ISO8601DateTime(text: String) {
+
+ private val pattern1 = """(\d+)-(\d+)-(\d+)T(\d+):(\d+):(\d+).(\d+)Z""".r
+ private val pattern2 = """(\d+)-(\d+)-(\d+)T(\d+):(\d+):(\d+)Z""".r
+
+ val value: ZonedDateTime = {
+ text match {
+ case pattern1(y, m, d, h, i, s, n) ⇒ ZonedDateTime.of(y.toInt, m.toInt, d.toInt, h.toInt, i.toInt, s.toInt, n.toInt * 1000000, ZoneId.of("UTC"))
+ case pattern2(y, m, d, h, i, s) ⇒ ZonedDateTime.of(y.toInt, m.toInt, d.toInt, h.toInt, i.toInt, s.toInt, 0, ZoneId.of("UTC"))
+ case null ⇒ null
+ case _ ⇒ throw new Exception(s"wrong date time format: $text")
+ }
+ }
+
+ override def toString: String = if (value == null) "" else value.toString
+}
diff --git a/samples-scala/src/main/scala/A_OutputMessagesToConsole/Temperature.scala b/samples-scala/src/main/scala/B_PrintTemperature/Temperature.scala
similarity index 92%
rename from samples-scala/src/main/scala/A_OutputMessagesToConsole/Temperature.scala
rename to samples-scala/src/main/scala/B_PrintTemperature/Temperature.scala
index c74d63b..34d7ae3 100644
--- a/samples-scala/src/main/scala/A_OutputMessagesToConsole/Temperature.scala
+++ b/samples-scala/src/main/scala/B_PrintTemperature/Temperature.scala
@@ -1,6 +1,6 @@
// Copyright (c) Microsoft. All rights reserved.
-package A_OutputMessagesToConsole
+package B_PrintTemperature
import java.time.{ZoneId, ZonedDateTime}
diff --git a/samples-scala/src/main/scala/C_Throughput/Demo.scala b/samples-scala/src/main/scala/C_Throughput/Demo.scala
new file mode 100644
index 0000000..cd828e8
--- /dev/null
+++ b/samples-scala/src/main/scala/C_Throughput/Demo.scala
@@ -0,0 +1,34 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package C_Throughput
+
+import akka.stream.scaladsl.Sink
+import com.microsoft.azure.iot.iothubreact.MessageFromDevice
+import com.microsoft.azure.iot.iothubreact.ResumeOnError._
+import com.microsoft.azure.iot.iothubreact.scaladsl._
+
+import scala.concurrent.duration._
+import scala.language.postfixOps
+
+/** Measure the streaming throughput
+ */
+object Demo extends App {
+
+ val showStatsEvery = 1 second
+
+ // Messages throughput monitoring sink
+ val monitor = Sink.foreach[MessageFromDevice] {
+ m ⇒ {
+ Monitoring.total += 1
+ Monitoring.totals(m.partition.get) += 1
+ }
+ }
+
+ // Start processing the stream
+ IoTHub().source
+ .to(monitor)
+ .run()
+
+ // Print statistics at some interval
+ Monitoring.printStatisticsWithFrequency(showStatsEvery)
+}
diff --git a/samples-scala/src/main/scala/B_MessagesThroughput/Monitoring.scala b/samples-scala/src/main/scala/C_Throughput/Monitoring.scala
similarity index 92%
rename from samples-scala/src/main/scala/B_MessagesThroughput/Monitoring.scala
rename to samples-scala/src/main/scala/C_Throughput/Monitoring.scala
index ed1b4b3..45243ae 100644
--- a/samples-scala/src/main/scala/B_MessagesThroughput/Monitoring.scala
+++ b/samples-scala/src/main/scala/C_Throughput/Monitoring.scala
@@ -1,6 +1,6 @@
// Copyright (c) Microsoft. All rights reserved.
-package B_MessagesThroughput
+package C_Throughput
import com.typesafe.config.ConfigFactory
@@ -11,12 +11,12 @@ import scala.language.postfixOps
/** Monitoring logic, some properties to keep count and a method to print the
* statistics.
- * Note: for demo readability the monitoring Sink is in the Demo class
+ * Note: for readability the monitoring Sink is in the Demo class
*/
object Monitoring {
// Auxiliary vars
- private[this] val iotHubPartitions = ConfigFactory.load().getInt("iothub.partitions")
+ private[this] val iotHubPartitions = ConfigFactory.load().getInt("iothub-react.connection.partitions")
private[this] var previousTime : Long = 0
private[this] var previousTotal: Long = 0
diff --git a/samples-scala/src/main/scala/D_Throttling/Demo.scala b/samples-scala/src/main/scala/D_Throttling/Demo.scala
new file mode 100644
index 0000000..291a2f1
--- /dev/null
+++ b/samples-scala/src/main/scala/D_Throttling/Demo.scala
@@ -0,0 +1,44 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package D_Throttling
+
+import akka.stream.ThrottleMode
+import akka.stream.scaladsl.{Flow, Sink}
+import com.microsoft.azure.iot.iothubreact.MessageFromDevice
+import com.microsoft.azure.iot.iothubreact.scaladsl._
+import com.microsoft.azure.iot.iothubreact.ResumeOnError._
+
+import scala.concurrent.duration._
+import scala.language.postfixOps
+
+object Demo extends App {
+
+ val maxSpeed = 100
+
+ // Sink combining throttling and monitoring
+ lazy val throttleAndMonitor = Flow[MessageFromDevice]
+ .alsoTo(throttler)
+ .to(monitor)
+
+ // Stream throttling sink
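+  // Shape the traffic to maxSpeed msg/sec, allowing bursts of up to maxSpeed/10;
+  // ThrottleMode.Shaping delays excess messages instead of failing the stream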
+ val throttler = Flow[MessageFromDevice]
+ .throttle(maxSpeed, 1.second, maxSpeed / 10, ThrottleMode.Shaping)
+ .to(Sink.ignore)
+
+ // Messages throughput monitoring sink
+ val monitor = Sink.foreach[MessageFromDevice] {
+ m ⇒ {
+ Monitoring.total += 1
+ Monitoring.totals(m.partition.get) += 1
+ }
+ }
+
+ println(s"Streaming messages at ${maxSpeed} msg/sec")
+
+ IoTHub().source
+ .to(throttleAndMonitor)
+ .run()
+
+ // Print statistics at some interval
+ Monitoring.printStatisticsWithFrequency(1 second)
+}
diff --git a/samples-scala/src/main/scala/D_Throttling/Monitoring.scala b/samples-scala/src/main/scala/D_Throttling/Monitoring.scala
new file mode 100644
index 0000000..448b2f9
--- /dev/null
+++ b/samples-scala/src/main/scala/D_Throttling/Monitoring.scala
@@ -0,0 +1,57 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package D_Throttling
+
+import com.typesafe.config.ConfigFactory
+
+import scala.collection.parallel.mutable.ParArray
+import scala.concurrent.ExecutionContext.Implicits.global
+import scala.concurrent.duration.{FiniteDuration, _}
+import scala.language.postfixOps
+
+/** Monitoring logic, some properties to keep count and a method to print the
+ * statistics.
+ * Note: for readability the monitoring Sink is in the Demo class
+ */
+object Monitoring {
+
+ // Auxiliary vars
+ private[this] val iotHubPartitions = ConfigFactory.load().getInt("iothub-react.connection.partitions")
+ private[this] var previousTime : Long = 0
+ private[this] var previousTotal: Long = 0
+
+ // Total count of messages
+ var total: Long = 0
+
+ // Total per partition
+ var totals = new ParArray[Long](iotHubPartitions)
+
+ /* Schedule the stats to be printed with some frequency */
+ def printStatisticsWithFrequency(frequency: FiniteDuration): Unit = {
+ implicit val system = akka.actor.ActorSystem("system")
+ system.scheduler.schedule(1 seconds, frequency)(printStats)
+ }
+
+ /** Print the number of messages received from each partition, the total
+ * and the throughput msg/sec
+ */
+ private[this] def printStats(): Unit = {
+
+ val now = java.time.Instant.now.toEpochMilli
+
+ if (total > 0 && previousTime > 0) {
+
+ print(s"Partitions: ")
+ for (i ← 0 until iotHubPartitions - 1) print(pad5(totals(i)) + ",")
+ print(pad5(totals(iotHubPartitions - 1)))
+
+ val throughput = ((total - previousTotal) * 1000 / (now - previousTime)).toInt
+ println(s" - Total: ${pad5(total)} - Speed: $throughput/sec")
+ }
+
+ previousTotal = total
+ previousTime = now
+ }
+
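+  // Zero-pad a counter to 5 digits so the per-partition totals stay aligned in the console output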
+ private[this] def pad5(x: Long): String = f"${x}%05d"
+}
diff --git a/samples-scala/src/main/scala/C_Checkpoints/Demo.scala b/samples-scala/src/main/scala/E_Checkpoints/Demo.scala
similarity index 66%
rename from samples-scala/src/main/scala/C_Checkpoints/Demo.scala
rename to samples-scala/src/main/scala/E_Checkpoints/Demo.scala
index 1a9135f..55d9824 100644
--- a/samples-scala/src/main/scala/C_Checkpoints/Demo.scala
+++ b/samples-scala/src/main/scala/E_Checkpoints/Demo.scala
@@ -1,11 +1,12 @@
// Copyright (c) Microsoft. All rights reserved.
-package C_Checkpoints
+package E_Checkpoints
import akka.stream.scaladsl.Sink
-import com.microsoft.azure.iot.iothubreact.IoTMessage
+import com.microsoft.azure.iot.iothubreact.MessageFromDevice
import com.microsoft.azure.iot.iothubreact.ResumeOnError._
-import com.microsoft.azure.iot.iothubreact.scaladsl.IoTHub
+import com.microsoft.azure.iot.iothubreact.filters.Device
+import com.microsoft.azure.iot.iothubreact.scaladsl._
/** Retrieve messages from IoT hub and save the current position
* In case of restart the stream starts from where it left
@@ -16,14 +17,13 @@ import com.microsoft.azure.iot.iothubreact.scaladsl.IoTHub
*/
object Demo extends App {
- // Sink printing to the console
- val console = Sink.foreach[IoTMessage] {
+ val console = Sink.foreach[MessageFromDevice] {
t ⇒ println(s"Message from ${t.deviceId} - Time: ${t.created}")
}
- // Stream
+ // Stream using checkpointing
IoTHub().source(withCheckpoints = true)
- .filter(m ⇒ m.deviceId == "device1000")
+ .filter(Device("device1000"))
.to(console)
.run()
}
diff --git a/samples-scala/src/main/scala/F_SendMessageToDevice/Demo.scala b/samples-scala/src/main/scala/F_SendMessageToDevice/Demo.scala
new file mode 100644
index 0000000..e512383
--- /dev/null
+++ b/samples-scala/src/main/scala/F_SendMessageToDevice/Demo.scala
@@ -0,0 +1,59 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package F_SendMessageToDevice
+
+import akka.stream.scaladsl.Flow
+import com.microsoft.azure.iot.iothubreact.MessageToDevice
+import com.microsoft.azure.iot.iothubreact.ResumeOnError._
+import com.microsoft.azure.iot.iothubreact.filters.MessageType
+import com.microsoft.azure.iot.iothubreact.scaladsl._
+
+import scala.language.{implicitConversions, postfixOps}
+
+object Demo extends App with Deserialize {
+
+ val turnFanOn = MessageToDevice("Turn fan ON")
+ val turnFanOff = MessageToDevice("Turn fan OFF")
+
+ val hub = IoTHub()
+
+ // Source
+ val temperatures = hub
+ .source()
+ .filter(MessageType("temperature"))
+ .map(deserialize)
+
+ // Too cold sink
+ val tooColdWorkflow = Flow[Temperature]
+ .filter(_.value < 65)
+ .map(t ⇒ turnFanOff.to(t.deviceId))
+ .to(hub.sink())
+
+ // Too warm sink
+ val tooWarmWorkflow = Flow[Temperature]
+ .filter(_.value > 85)
+ .map(t ⇒ turnFanOn.to(t.deviceId))
+ .to(hub.sink())
+
+ temperatures
+ .alsoTo(tooColdWorkflow)
+ .to(tooWarmWorkflow)
+ .run()
+
+ /*
+ // Run the two workflows in parallel
+ RunnableGraph.fromGraph(GraphDSL.create() {
+ implicit b =>
+ import GraphDSL.Implicits._
+
+ val shape = b.add(Broadcast[Temperature](2))
+
+ temperatures ~> shape.in
+
+ shape.out(0) ~> tooColdWorkflow
+ shape.out(1) ~> tooWarmWorkflow
+
+ ClosedShape
+ }).run()
+ */
+}
diff --git a/samples-scala/src/main/scala/F_SendMessageToDevice/Deserialize.scala b/samples-scala/src/main/scala/F_SendMessageToDevice/Deserialize.scala
new file mode 100644
index 0000000..730117b
--- /dev/null
+++ b/samples-scala/src/main/scala/F_SendMessageToDevice/Deserialize.scala
@@ -0,0 +1,16 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package F_SendMessageToDevice
+
+import com.microsoft.azure.iot.iothubreact.MessageFromDevice
+import org.json4s.DefaultFormats
+import org.json4s.jackson.JsonMethods.parse
+
+trait Deserialize {
+ def deserialize(m: MessageFromDevice): Temperature = {
+ implicit val formats = DefaultFormats
+ val temperature = parse(m.contentAsString).extract[Temperature]
+ temperature.deviceId = m.deviceId
+ temperature
+ }
+}
diff --git a/samples-scala/src/main/scala/F_SendMessageToDevice/ISO8601DateTime.scala b/samples-scala/src/main/scala/F_SendMessageToDevice/ISO8601DateTime.scala
new file mode 100644
index 0000000..a9529d0
--- /dev/null
+++ b/samples-scala/src/main/scala/F_SendMessageToDevice/ISO8601DateTime.scala
@@ -0,0 +1,26 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package F_SendMessageToDevice
+
+import java.time.{ZoneId, ZonedDateTime}
+
+/** ISO8601 with and without milliseconds decimals
+ *
+ * @param text String date
+ */
+case class ISO8601DateTime(text: String) {
+
+ private val pattern1 = """(\d+)-(\d+)-(\d+)T(\d+):(\d+):(\d+).(\d+)Z""".r
+ private val pattern2 = """(\d+)-(\d+)-(\d+)T(\d+):(\d+):(\d+)Z""".r
+
+ val value: ZonedDateTime = {
+ text match {
+ case pattern1(y, m, d, h, i, s, n) ⇒ ZonedDateTime.of(y.toInt, m.toInt, d.toInt, h.toInt, i.toInt, s.toInt, n.toInt * 1000000, ZoneId.of("UTC"))
+ case pattern2(y, m, d, h, i, s) ⇒ ZonedDateTime.of(y.toInt, m.toInt, d.toInt, h.toInt, i.toInt, s.toInt, 0, ZoneId.of("UTC"))
+ case null ⇒ null
+ case _ ⇒ throw new Exception(s"wrong date time format: $text")
+ }
+ }
+
+ override def toString: String = if (value == null) "" else value.toString
+}
diff --git a/samples-scala/src/main/scala/F_SendMessageToDevice/Temperature.scala b/samples-scala/src/main/scala/F_SendMessageToDevice/Temperature.scala
new file mode 100644
index 0000000..6e72992
--- /dev/null
+++ b/samples-scala/src/main/scala/F_SendMessageToDevice/Temperature.scala
@@ -0,0 +1,15 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package F_SendMessageToDevice
+
+/** Temperature measured by a device
+ *
+ * @param value Temperature value measured by the device
+ * @param time Time (as a string) when the device measured the temperature
+ */
+case class Temperature(value: Float, time: String) {
+
+ var deviceId: String = ""
+
+ val datetime = ISO8601DateTime(time)
+}
diff --git a/samples-scala/src/main/scala/OSN/Demo1_DSL.scala b/samples-scala/src/main/scala/OSN/Demo1_DSL.scala
new file mode 100644
index 0000000..8c66350
--- /dev/null
+++ b/samples-scala/src/main/scala/OSN/Demo1_DSL.scala
@@ -0,0 +1,30 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package OSN.Demo.Simple
+
+import akka.stream.scaladsl.Sink
+import com.microsoft.azure.iot.iothubreact.MessageFromDevice
+import com.microsoft.azure.iot.iothubreact.scaladsl.IoTHub
+import com.microsoft.azure.iot.iothubreact.ResumeOnError._
+
+object Console {
+
+ def apply() = Sink.foreach[MessageFromDevice] {
+
+ m ⇒ println(
+ s"${m.created} - ${m.deviceId} - ${m.messageType}"
+ + s" - ${m.contentAsString}")
+
+ }
+}
+
+object Demo extends App {
+
+ IoTHub()
+
+ .source()
+
+ .to(Console())
+
+ .run()
+}
diff --git a/samples-scala/src/main/scala/OSN/Demo2_API.scala b/samples-scala/src/main/scala/OSN/Demo2_API.scala
new file mode 100644
index 0000000..8da8836
--- /dev/null
+++ b/samples-scala/src/main/scala/OSN/Demo2_API.scala
@@ -0,0 +1,48 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package OSN.Demo.More
+
+import akka.stream.scaladsl.Sink
+import com.microsoft.azure.iot.iothubreact.MessageFromDevice
+import com.microsoft.azure.iot.iothubreact.filters.{Device, MessageType}
+import com.microsoft.azure.iot.iothubreact.scaladsl.IoTHub
+import com.microsoft.azure.iot.iothubreact.ResumeOnError._
+
+object Console {
+
+ def apply() = Sink.foreach[MessageFromDevice] {
+
+ m ⇒ println(
+ s"${m.created} - ${m.deviceId} - ${m.messageType}"
+ + s" - ${m.contentAsString}")
+
+ }
+}
+
+object Storage {
+
+ def apply() = Sink.foreach[MessageFromDevice] {
+
+ m ⇒ {
+ /* ... write to storage ... */
+ }
+
+ }
+}
+
+object Demo extends App {
+
+ IoTHub()
+
+ .source(java.time.Instant.now()) // <===
+
+ .filter(MessageType("temperature")) // <===
+
+ .filter(Device("device1000")) // <===
+
+ .alsoTo(Storage()) // <===
+
+ .to(Console())
+
+ .run()
+}
diff --git a/samples-scala/src/main/scala/OSN/Demo3_Checkpoints.scala b/samples-scala/src/main/scala/OSN/Demo3_Checkpoints.scala
new file mode 100644
index 0000000..e08f993
--- /dev/null
+++ b/samples-scala/src/main/scala/OSN/Demo3_Checkpoints.scala
@@ -0,0 +1,35 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package OSN.Demo.Checkpoints
+
+import akka.stream.scaladsl.Sink
+import com.microsoft.azure.iot.iothubreact.MessageFromDevice
+import com.microsoft.azure.iot.iothubreact.filters.{Device, MessageType}
+import com.microsoft.azure.iot.iothubreact.scaladsl.IoTHub
+import com.microsoft.azure.iot.iothubreact.ResumeOnError._
+
+object Console {
+
+ def apply() = Sink.foreach[MessageFromDevice] {
+
+ m ⇒ println(
+ s"${m.created} - ${m.deviceId} - ${m.messageType}"
+ + s" - ${m.contentAsString}")
+
+ }
+}
+
+object Demo extends App {
+
+ IoTHub()
+
+ .source(withCheckpoints = true) // <===
+
+ .filter(MessageType("temperature"))
+
+ .filter(Device("device1000"))
+
+ .to(Console())
+
+ .run()
+}
diff --git a/src/main/resources/reference.conf b/src/main/resources/reference.conf
index be98d00..b283312 100644
--- a/src/main/resources/reference.conf
+++ b/src/main/resources/reference.conf
@@ -1,85 +1,92 @@
// Configuration file [HOCON format]
-// IoT Hub settings can be retrieved from the Azure portal at https://portal.azure.com
-iothub {
+iothub-react {
- // see: "IoT Hub" >> your hub >> "Messaging" >> "Partitions"
- partitions = ${?IOTHUB_PARTITIONS}
+ // Connection settings can be retrieved from the Azure portal at https://portal.azure.com
+ // For more information about IoT Hub settings, see:
+ // https://docs.microsoft.com/en-us/azure/iot-hub/iot-hub-create-through-portal#endpoints
+ // https://docs.microsoft.com/en-us/azure/iot-hub/iot-hub-java-java-getstarted
+ connection {
+ // see: Endpoints ⇒ Messaging ⇒ Events ⇒ "Event Hub-compatible name"
+ hubName = ${?IOTHUB_EVENTHUB_NAME}
- // see: "IoT Hub" >> your hub >> "Messaging" >> "Event Hub-compatible name"
- name = ${?IOTHUB_NAME}
+ // see: Endpoints ⇒ Messaging ⇒ Events ⇒ "Event Hub-compatible endpoint"
+ hubEndpoint = ${?IOTHUB_EVENTHUB_ENDPOINT}
- // see: "IoT Hub" >> your hub > "Messaging" >> "Event Hub-compatible endpoint"
- // e.g. from "sb://iothub-ns-toketi-i-18552-16281e72ba.servicebus.windows.net/"
- // use "iothub-ns-toketi-i-18552-16281e72ba"
- namespace = ${?IOTHUB_NAMESPACE}
+ // see: Endpoints ⇒ Messaging ⇒ Events ⇒ Partitions
+ hubPartitions = ${?IOTHUB_EVENTHUB_PARTITIONS}
- // see: "IoT Hub" >> your hub >> "Shared access policies"
- // e.g. you could use the predefined "iothubowner"
- keyName = ${?IOTHUB_ACCESS_KEY_NAME}
+ // see: "IoT Hub" ⇒ your hub ⇒ "Shared access policies"
+ // e.g. you should use the predefined "service" policy
+ accessPolicy = ${?IOTHUB_ACCESS_POLICY}
- // see: "IoT Hub" >> your hub >> "Shared access policies" >> key name >> "Primary key"
- key = ${?IOTHUB_ACCESS_KEY_VALUE}
+ // see: Shared access policies ⇒ key name ⇒ Primary key
+ accessKey = ${?IOTHUB_ACCESS_KEY}
- // see: "IoT Hub" >> your hub > "Messaging" >> Consumer groups
- // "$Default" is predefined and is the typical scenario
- consumerGroup = "$Default"
-}
+ // see: Shared access policies ⇒ key name ⇒ Connection string ⇒ "HostName"
+ accessHostName = ${?IOTHUB_ACCESS_HOSTNAME}
+ }
-iothub-stream {
- // Value expressed as a duration, e.g. 3s, 3000ms, "3 seconds", etc.
- receiverTimeout = 3s
+ streaming {
+    // see: "IoT Hub" >> your hub >> "Messaging" >> Consumer groups
+ // "$Default" is predefined and is the typical scenario
+ consumerGroup = "$Default"
- // How many messages to retrieve on each pull, max is 999
- receiverBatchSize = 999
-}
-
-// IoT hub stream checkpointing options
-iothub-checkpointing {
- // Whether the checkpointing feature is enabled
- enabled = false
-
- // Checkpoints frequency (best effort), for each IoT hub partition
- // Min: 1 second, Max: 1 minute
- frequency = 15s
-
- // How many messages to stream before saving the position, for each IoT hub partition.
- // Since the stream position is saved in the Source, before the rest of the
- // Graph (Flows/Sinks), this provides a mechanism to replay buffered messages.
- // The value should be greater than receiverBatchSize
- countThreshold = 2000
-
- // Store a position if its value is older than this amount of time, ignoring the threshold.
- // For instance when the telemetry stops, this will force to write the last offset after some time.
- // Min: 1 second, Max: 1 hour. Value is rounded to seconds.
- timeThreshold = 5min
-
- storage {
+ // How many messages to retrieve on each pull, max is 999
+ receiverBatchSize = 999
// Value expressed as a duration, e.g. 3s, 3000ms, "3 seconds", etc.
- rwTimeout = 5s
+ receiverTimeout = 5s
+ }
- backendType = "AzureBlob"
+ checkpointing {
- // If you use the same storage while processing multiple streams, you'll want
- // to use a distinct table/container/path in each job, to to keep state isolated
- namespace = "iothub-react-checkpoints"
+ // Whether the checkpointing feature is enabled
+ enabled = false
- // com.microsoft.azure.iot.iothubreact.checkpointing.Backends.AzureBlob
- azureblob {
- // Time allowed for a checkpoint to be written, rounded to seconds (min 15, max 60)
- lease = 15s
- // Whether to use the Azure Storage Emulator
- useEmulator = false
- // Storage credentials
- protocol = "https"
- account = ${?IOTHUB_CHECKPOINT_ACCOUNT}
- key = ${?IOTHUB_CHECKPOINT_KEY}
- }
+ // Checkpoints frequency (best effort), for each IoT hub partition
+ // Min: 1 second, Max: 1 minute
+ frequency = 15s
- cassandra {
- cluster = "localhost:9042"
- replicationFactor = 1
+ // How many messages to stream before saving the position, for each IoT hub partition.
+ // Since the stream position is saved in the Source, before the rest of the
+ // Graph (Flows/Sinks), this provides a mechanism to replay buffered messages.
+ // The value should be greater than receiverBatchSize
+ countThreshold = 2000
+
+ // Store a position if its value is older than this amount of time, ignoring the threshold.
+ // For instance when the telemetry stops, this will force to write the last offset after some time.
+ // Min: 1 second, Max: 1 hour. Value is rounded to seconds.
+ timeThreshold = 5min
+
+ storage {
+
+ // Value expressed as a duration, e.g. 3s, 3000ms, "3 seconds", etc.
+ rwTimeout = 5s
+
+ // Supported types (not case sensitive): Cassandra, AzureBlob
+ backendType = "AzureBlob"
+
+ // If you use the same storage while processing multiple streams, you'll want
+      // to use a distinct table/container/path in each job, to keep state isolated
+ namespace = "iothub-react-checkpoints"
+
+ // com.microsoft.azure.iot.iothubreact.checkpointing.Backends.AzureBlob
+ azureblob {
+ // Time allowed for a checkpoint to be written, rounded to seconds (min 15, max 60)
+ lease = 15s
+ // Whether to use the Azure Storage Emulator
+ useEmulator = false
+ // Storage credentials
+ protocol = "https"
+ account = ${?IOTHUB_CHECKPOINT_ACCOUNT}
+ key = ${?IOTHUB_CHECKPOINT_KEY}
+ }
+
+ cassandra {
+ cluster = "localhost:9042"
+ replicationFactor = 1
+ }
}
}
}
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/Configuration.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/Configuration.scala
index 16a802a..73b589f 100644
--- a/src/main/scala/com/microsoft/azure/iot/iothubreact/Configuration.scala
+++ b/src/main/scala/com/microsoft/azure/iot/iothubreact/Configuration.scala
@@ -12,52 +12,58 @@ import scala.language.postfixOps
/** Hold IoT Hub configuration settings
*
- * @see https://github.com/typesafehub/config for information about the
- * configuration file formats
- * @todo dependency injection
+ * @see https://github.com/typesafehub/config for information about the configuration file formats
*/
private[iothubreact] object Configuration {
+ // TODO: dependency injection
+
+ private[this] val confConnPath = "iothub-react.connection."
+ private[this] val confStreamingPath = "iothub-react.streaming."
+
// Maximum size supported by the client
private[this] val MaxBatchSize = 999
// Default IoThub client timeout
private[this] val DefaultReceiverTimeout = 3 seconds
- private[this] val conf: Config = ConfigFactory.load()
+ private[this] lazy val conf: Config = ConfigFactory.load()
// IoT hub storage details
- val iotHubPartitions: Int = conf.getInt("iothub.partitions")
- val iotHubNamespace : String = conf.getString("iothub.namespace")
- val iotHubName : String = conf.getString("iothub.name")
- val iotHubKeyName : String = conf.getString("iothub.keyName")
- val iotHubKey : String = conf.getString("iothub.key")
+ lazy val iotHubName : String = conf.getString(confConnPath + "hubName")
+ lazy val iotHubNamespace : String = getNamespaceFromEndpoint(conf.getString(confConnPath + "hubEndpoint"))
+ lazy val iotHubPartitions: Int = conf.getInt(confConnPath + "hubPartitions")
+ lazy val accessPolicy : String = conf.getString(confConnPath + "accessPolicy")
+ lazy val accessKey : String = conf.getString(confConnPath + "accessKey")
+ lazy val accessHostname : String = conf.getString(confConnPath + "accessHostName")
// Consumer group used to retrieve messages
// @see https://azure.microsoft.com/en-us/documentation/articles/event-hubs-overview
- private[this] val tmpCG = conf.getString("iothub.consumerGroup")
- val receiverConsumerGroup: String =
- tmpCG match {
- case "$Default" ⇒ EventHubClient.DEFAULT_CONSUMER_GROUP_NAME
- case "Default" ⇒ EventHubClient.DEFAULT_CONSUMER_GROUP_NAME
- case "default" ⇒ EventHubClient.DEFAULT_CONSUMER_GROUP_NAME
+ lazy private[this] val tmpCG = conf.getString(confStreamingPath + "consumerGroup")
+ lazy val receiverConsumerGroup: String =
+ tmpCG.toUpperCase match {
+ case "$DEFAULT" ⇒ EventHubClient.DEFAULT_CONSUMER_GROUP_NAME
case "DEFAULT" ⇒ EventHubClient.DEFAULT_CONSUMER_GROUP_NAME
case _ ⇒ tmpCG
}
// Message retrieval timeout in milliseconds
- private[this] val tmpRTO = conf.getDuration("iothub-stream.receiverTimeout").toMillis
- val receiverTimeout: FiniteDuration =
+ lazy private[this] val tmpRTO = conf.getDuration(confStreamingPath + "receiverTimeout").toMillis
+ lazy val receiverTimeout: FiniteDuration =
if (tmpRTO > 0)
FiniteDuration(tmpRTO, TimeUnit.MILLISECONDS)
else
DefaultReceiverTimeout
// How many messages to retrieve on each call to the storage
- private[this] val tmpRBS = conf.getInt("iothub-stream.receiverBatchSize")
- val receiverBatchSize: Int =
+ lazy private[this] val tmpRBS = conf.getInt(confStreamingPath + "receiverBatchSize")
+ lazy val receiverBatchSize: Int =
if (tmpRBS > 0 && tmpRBS <= MaxBatchSize)
tmpRBS
else
MaxBatchSize
+
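+  // Extract the namespace from the Event Hub-compatible endpoint, e.g. from
+  // "sb://iothub-ns-toketi-i-18552-16281e72ba.servicebus.windows.net/" extract "iothub-ns-toketi-i-18552-16281e72ba"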
+ private[this] def getNamespaceFromEndpoint(endpoint: String): String = {
+ endpoint.replaceFirst(".*://", "").replaceFirst("\\..*", "")
+ }
}
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/DeviceProperties.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/DeviceProperties.scala
new file mode 100644
index 0000000..8dff593
--- /dev/null
+++ b/src/main/scala/com/microsoft/azure/iot/iothubreact/DeviceProperties.scala
@@ -0,0 +1,9 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package com.microsoft.azure.iot.iothubreact
+
+/** Properties to set on the device twin
+ */
+class DeviceProperties {
+
+}
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/IoTHubStorage.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/IoTHubStorage.scala
index acaec49..becbedc 100644
--- a/src/main/scala/com/microsoft/azure/iot/iothubreact/IoTHubStorage.scala
+++ b/src/main/scala/com/microsoft/azure/iot/iothubreact/IoTHubStorage.scala
@@ -10,14 +10,14 @@ private object IoTHubStorage extends Logger {
private[this] val connString = new ConnectionStringBuilder(
Configuration.iotHubNamespace,
Configuration.iotHubName,
- Configuration.iotHubKeyName,
- Configuration.iotHubKey).toString
+ Configuration.accessPolicy,
+ Configuration.accessKey).toString
- // @todo Manage transient errors e.g. timeouts
+ // TODO: Manage transient errors e.g. timeouts
// EventHubClient.createFromConnectionString(connString)
// .get(Configuration.receiverTimeout, TimeUnit.MILLISECONDS)
def createClient(): EventHubClient = {
- log.debug(s"Creating EventHub client to ${Configuration.iotHubName}")
+ log.info(s"Creating EventHub client to ${Configuration.iotHubName}")
EventHubClient.createFromConnectionStringSync(connString)
}
}
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/IoTMessage.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/IoTMessage.scala
deleted file mode 100644
index cc6d86b..0000000
--- a/src/main/scala/com/microsoft/azure/iot/iothubreact/IoTMessage.scala
+++ /dev/null
@@ -1,68 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-package com.microsoft.azure.iot.iothubreact
-
-import java.time.Instant
-import java.util
-
-import com.microsoft.azure.eventhubs.EventData
-
-/* IoTMessage factory */
-private object IoTMessage {
-
- /** Create a user friendly representation of the IoT message from the raw
- * data coming from the storage
- *
- * @param rawData Raw data retrieved from the IoT hub storage
- * @param partition Storage partition where the message was retrieved from
- *
- * @return
- */
- def apply(rawData: EventData, partition: Option[Int]): IoTMessage = {
- new IoTMessage(Some(rawData), partition)
- }
-}
-
-/** Expose the IoT message body and timestamp
- *
- * @param partition Storage partition where the message was retrieved from
- */
-class IoTMessage(data: Option[EventData], val partition: Option[Int]) {
-
- // Internal properties set by IoT stoage
- private[this] lazy val systemProps = data.get.getSystemProperties()
-
- // Meta properties set by the device
- // Note: empty when using MQTT
- lazy val properties: util.Map[String, String] = data.get.getProperties()
-
- // Whether this is a keep alive message generated by the stream and not by IoT hub
- val isKeepAlive: Boolean = (partition == None)
-
- // Content type, e.g. how to interpret/deserialize the content
- // Note: empty when using MQTT
- lazy val model: String = properties.getOrDefault("model", "")
-
- /** Time when the message was received by IoT hub service
- * Note that this might differ from the time set by the device, e.g. in case
- * of batch uploads
- */
- lazy val created: Instant = systemProps.getEnqueuedTime
-
- /** IoT message offset
- * Useful for example to store the current position in the stream
- */
- lazy val offset: String = systemProps.getOffset
-
- // IoT message sequence number
- lazy val sequenceNumber: Long = systemProps.getSequenceNumber
-
- // ID of the device who sent the message
- lazy val deviceId: String = systemProps.get("iothub-connection-device-id").toString
-
- // IoT message content bytes
- lazy val content: Array[Byte] = data.get.getBody
-
- // IoT message content as string, e.g. JSON/XML/etc.
- lazy val contentAsString: String = new String(content)
-}
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/Logger.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/Logger.scala
index dd68ea0..b24e3bd 100644
--- a/src/main/scala/com/microsoft/azure/iot/iothubreact/Logger.scala
+++ b/src/main/scala/com/microsoft/azure/iot/iothubreact/Logger.scala
@@ -5,6 +5,10 @@ package com.microsoft.azure.iot.iothubreact
import akka.actor.ActorSystem
import akka.event.{LogSource, Logging}
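+// One ActorSystem shared by all Logger instances, so loggers reuse the same
+// underlying threads instead of creating a new actor system per instance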
+private[iothubreact] object Logger {
+ val actorSystem = ActorSystem("IoTHubReact")
+}
+
/** Common logger via Akka
*
* @see http://doc.akka.io/docs/akka/2.4.10/scala/logging.html
@@ -17,5 +21,5 @@ private[iothubreact] trait Logger {
override def getClazz(o: AnyRef): Class[_] = o.getClass
}
- val log = Logging(ActorSystem("IoTHubReact"), this)
+ val log = Logging(Logger.actorSystem, this)
}
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/MessageFromDevice.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/MessageFromDevice.scala
new file mode 100644
index 0000000..11737b8
--- /dev/null
+++ b/src/main/scala/com/microsoft/azure/iot/iothubreact/MessageFromDevice.scala
@@ -0,0 +1,83 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package com.microsoft.azure.iot.iothubreact
+
+import java.time.Instant
+import java.util
+
+import com.microsoft.azure.eventhubs.EventData
+import com.microsoft.azure.servicebus.amqp.AmqpConstants
+
+/* MessageFromDevice factory */
+private object MessageFromDevice {
+
+ /** Create a user friendly representation of the IoT message from the raw
+ * data coming from the storage
+ *
+ * @param rawData Raw data retrieved from the IoT hub storage
+ * @param partition Storage partition where the message was retrieved from
+ *
+ * @return
+ */
+ def apply(rawData: EventData, partition: Option[Int]): MessageFromDevice = {
+ new MessageFromDevice(Some(rawData), partition)
+ }
+}
+
+/** Expose the IoT device message body and timestamp
+ *
+ * @param partition Storage partition where the message was retrieved from
+ */
+class MessageFromDevice(data: Option[EventData], val partition: Option[Int]) {
+
+ // TODO: test properties over all protocols
+ // TODO: use system property for Content Type and Message Type once available in Azure SDK
+
+ // NOTE: this should become a system property in future versions of Azure SDKs
+ // contentTypeProperty = AmqpConstants.AMQP_PROPERTY_CONTENT_TYPE
+ private[iothubreact] val contentTypeProperty = "$$contentType"
+
+ // NOTE: this should become a system property in future versions of Azure SDKs
+ private[iothubreact] val messageTypeProperty = "$$messageType"
+
+ private[iothubreact] val messageIdProperty = AmqpConstants.AMQP_PROPERTY_MESSAGE_ID
+
+ private[iothubreact] val deviceIdProperty = "iothub-connection-device-id"
+
+  // Internal properties set by IoT storage
+ private[this] lazy val systemProps = data.get.getSystemProperties()
+
+ // Meta properties set by the device
+ lazy val properties: util.Map[String, String] = data.get.getProperties()
+
+ // Whether this is a keep alive message generated by the stream and not by IoT hub
+ val isKeepAlive: Boolean = (partition == None)
+
+ // Message type, the class to use to map the payload
+ lazy val messageType: String = properties.getOrDefault(messageTypeProperty, "")
+
+ // Content type, e.g. JSON/Protobuf/Bond etc.
+ // contentType = data.get.getSystemProperties.get(contentTypeProperty)
+ lazy val contentType = properties.getOrDefault(contentTypeProperty, "")
+
+ // Time when the message was received by IoT hub service. *NOT* the device time.
+ lazy val created: Instant = systemProps.getEnqueuedTime
+
+ // IoT message offset, useful to store the current position in the stream
+ lazy val offset: String = systemProps.getOffset
+
+ // IoT message sequence number
+ lazy val sequenceNumber: Long = systemProps.getSequenceNumber
+
+  // ID of the device that sent the message
+ lazy val deviceId: String = systemProps.get(deviceIdProperty).toString
+
+ // Message ID
+ lazy val messageId: String = systemProps.get(messageIdProperty).toString
+
+ // IoT message content bytes
+ lazy val content: Array[Byte] = data.get.getBody
+
+ // IoT message content as string, e.g. JSON/XML/etc.
+ lazy val contentAsString: String = new String(content)
+}
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/IoTMessageSource.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/MessageFromDeviceSource.scala
similarity index 68%
rename from src/main/scala/com/microsoft/azure/iot/iothubreact/IoTMessageSource.scala
rename to src/main/scala/com/microsoft/azure/iot/iothubreact/MessageFromDeviceSource.scala
index c817d99..4ba6045 100644
--- a/src/main/scala/com/microsoft/azure/iot/iothubreact/IoTMessageSource.scala
+++ b/src/main/scala/com/microsoft/azure/iot/iothubreact/MessageFromDeviceSource.scala
@@ -5,6 +5,7 @@ package com.microsoft.azure.iot.iothubreact
import java.time.Instant
import akka.NotUsed
+import akka.stream.impl.fusing.GraphInterpreter
import akka.stream.scaladsl.Source
import akka.stream.stage.{GraphStage, GraphStageLogic, OutHandler}
import akka.stream.{Attributes, Outlet, SourceShape}
@@ -14,7 +15,7 @@ import scala.collection.JavaConverters._
import scala.concurrent.duration._
import scala.language.{implicitConversions, postfixOps}
-private object IoTMessageSource {
+private object MessageFromDeviceSource {
/** Create an instance of the messages source for the specified partition
*
@@ -24,8 +25,8 @@ private object IoTMessageSource {
* @return A source returning the body of the message sent from a device.
* Deserialization is left to the consumer.
*/
- def apply(partition: Int, offset: String, withCheckpoints: Boolean): Source[IoTMessage, NotUsed] = {
- Source.fromGraph(new IoTMessageSource(partition, offset, withCheckpoints))
+ def apply(partition: Int, offset: String, withCheckpoints: Boolean): Source[MessageFromDevice, NotUsed] = {
+ Source.fromGraph(new MessageFromDeviceSource(partition, offset, withCheckpoints))
}
/** Create an instance of the messages source for the specified partition
@@ -36,17 +37,17 @@ private object IoTMessageSource {
* @return A source returning the body of the message sent from a device.
* Deserialization is left to the consumer.
*/
- def apply(partition: Int, startTime: Instant, withCheckpoints: Boolean): Source[IoTMessage, NotUsed] = {
- Source.fromGraph(new IoTMessageSource(partition, startTime, withCheckpoints))
+ def apply(partition: Int, startTime: Instant, withCheckpoints: Boolean): Source[MessageFromDevice, NotUsed] = {
+ Source.fromGraph(new MessageFromDeviceSource(partition, startTime, withCheckpoints))
}
}
/** Source of messages from one partition of the IoT hub storage
- *
- * @todo Refactor and use async methods, compare performance
- * @todo Consider option to deserialize on the fly to [T], assuming JSON format
*/
-private class IoTMessageSource() extends GraphStage[SourceShape[IoTMessage]] with Logger {
+private class MessageFromDeviceSource() extends GraphStage[SourceShape[MessageFromDevice]] with Logger {
+
+ // TODO: Refactor and use async methods, compare performance
+  // TODO: Consider option to deserialize on the fly to [T] when the content type is JSON
abstract class OffsetType
@@ -102,10 +103,10 @@ private class IoTMessageSource() extends GraphStage[SourceShape[IoTMessage]] wit
}
// Define the (sole) output port of this stage
- private[this] val out: Outlet[IoTMessage] = Outlet("IoTMessageSource")
+ private[this] val out: Outlet[MessageFromDevice] = Outlet("MessageFromDeviceSource")
// Define the shape of this stage => SourceShape with the port defined above
- override val shape: SourceShape[IoTMessage] = SourceShape(out)
+ override val shape: SourceShape[MessageFromDevice] = SourceShape(out)
// All state MUST be inside the GraphStageLogic, never inside the enclosing
// GraphStage. This state is safe to read/write from all the callbacks
@@ -114,41 +115,50 @@ private class IoTMessageSource() extends GraphStage[SourceShape[IoTMessage]] wit
log.debug(s"Creating the IoT hub source")
new GraphStageLogic(shape) {
- val keepAliveSignal = new IoTMessage(None, None)
- val emptyResult = List[IoTMessage](keepAliveSignal)
+ val keepAliveSignal = new MessageFromDevice(None, None)
+ val emptyResult = List[MessageFromDevice](keepAliveSignal)
- lazy val receiver = getIoTHubReceiver
+ lazy val receiver = getIoTHubReceiver()
- setHandler(out, new OutHandler {
- log.debug(s"Defining the output handler")
+ setHandler(
+ out, new OutHandler {
+ log.debug(s"Defining the output handler")
- override def onPull(): Unit = {
- try {
- val messages = Retry(2, 1 seconds) {
- receiver.receiveSync(Configuration.receiverBatchSize)
- }
+ override def onPull(): Unit = {
+ try {
+ val messages = Retry(2, 1 seconds) {
+ receiver.receiveSync(Configuration.receiverBatchSize)
+ }
- if (messages == null) {
- log.debug(s"Partition ${partition} is empty")
- emitMultiple(out, emptyResult)
- } else {
- val iterator = messages.asScala.map(e ⇒ IoTMessage(e, Some(partition))).toList
- emitMultiple(out, iterator)
- }
- } catch {
- case e: Exception ⇒ {
- log.error(e, "Fatal error: " + e.getMessage)
+ if (messages == null) {
+ log.debug(s"Partition ${partition} is empty")
+ emitMultiple(out, emptyResult)
+ } else {
+ val iterator = messages.asScala.map(e ⇒ MessageFromDevice(e, Some(partition))).toList
+ log.debug(s"Emitting ${iterator.size} messages")
+ emitMultiple(out, iterator)
+ }
+ } catch {
+ case e: Exception ⇒ {
+ log.error(e, "Fatal error: " + e.getMessage)
+ }
}
}
- }
- })
+
+ override def onDownstreamFinish(): Unit = {
+ super.onDownstreamFinish()
+ log.info(s"Closing partition ${partition} receiver")
+ receiver.closeSync()
+ }
+ })
/** Connect to the IoT hub storage
*
* @return IoT hub storage receiver
*/
- def getIoTHubReceiver: PartitionReceiver = Retry(3, 2 seconds) {
+ def getIoTHubReceiver(): PartitionReceiver = Retry(3, 2 seconds) {
offsetType match {
+
case SequenceOffset ⇒ {
log.info(s"Connecting to partition ${partition.toString} starting from offset '${offset}'")
IoTHubStorage
@@ -159,7 +169,8 @@ private class IoTMessageSource() extends GraphStage[SourceShape[IoTMessage]] wit
offset,
OffsetInclusive)
}
- case TimeOffset ⇒ {
+
+ case TimeOffset ⇒ {
log.info(s"Connecting to partition ${partition.toString} starting from time '${startTime}'")
IoTHubStorage
.createClient()
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/MessageToDevice.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/MessageToDevice.scala
new file mode 100644
index 0000000..4ce83be
--- /dev/null
+++ b/src/main/scala/com/microsoft/azure/iot/iothubreact/MessageToDevice.scala
@@ -0,0 +1,143 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package com.microsoft.azure.iot.iothubreact
+
+import java.nio.charset.StandardCharsets
+import java.time.Instant
+import java.util.Date
+
+import com.microsoft.azure.iot.service.sdk.{DeliveryAcknowledgement, Message}
+
+import scala.collection.JavaConverters._
+import scala.collection.mutable.Map
+
+object MessageToDevice {
+
+ def apply(content: Array[Byte]) = new MessageToDevice(content)
+
+ def apply(content: String) = new MessageToDevice(content)
+
+ def apply(deviceId: String, content: Array[Byte]) = new MessageToDevice(deviceId, content)
+
+ def apply(deviceId: String, content: String) = new MessageToDevice(deviceId, content)
+}
+
+class MessageToDevice(var deviceId: String, val content: Array[Byte]) {
+
+ private[this] var ack : DeliveryAcknowledgement = DeliveryAcknowledgement.None
+ private[this] var correlationId: Option[String] = None
+ private[this] var expiry : Option[java.util.Date] = None
+ private[this] var userId : Option[String] = None
+ private[this] var properties : Option[Map[String, String]] = None
+
+ def this(content: String) = {
+ this("", content.getBytes(StandardCharsets.UTF_8))
+ }
+
+ def this(content: Array[Byte]) = {
+ this("", content)
+ }
+
+ def this(deviceId: String, content: String) = {
+ this(deviceId, content.getBytes(StandardCharsets.UTF_8))
+ }
+
+ /** Set the acknowledgement level for message delivery: None, NegativeOnly, PositiveOnly, Full
+ *
+ * @param ack Acknowledgement level
+ *
+ * @return The object for method chaining
+ */
+ def ack(ack: DeliveryAcknowledgement): MessageToDevice = {
+ this.ack = ack
+ this
+ }
+
+ /** Set the device ID
+ *
+ * @param deviceId Device ID
+ *
+ * @return The object for method chaining
+ */
+ def to(deviceId: String): MessageToDevice = {
+ this.deviceId = deviceId
+ this
+ }
+
+ /** Set the correlation ID, used in message responses and feedback
+ *
+ * @param correlationId Correlation ID
+ *
+ * @return The object for method chaining
+ */
+ def correlationId(correlationId: String): MessageToDevice = {
+ this.correlationId = Option(correlationId)
+ this
+ }
+
+ /** Set the expiration time in UTC, interpreted by hub on C2D messages
+ *
+ * @param expiry UTC expiration time
+ *
+ * @return The object for method chaining
+ */
+ def expiry(expiry: Instant): MessageToDevice = {
+ this.expiry = Some(new Date(expiry.toEpochMilli))
+ this
+ }
+
+ /** Set the user ID, used to specify the origin of messages generated by device hub
+ *
+ * @param userId User ID
+ *
+ * @return The object for method chaining
+ */
+ def userId(userId: String): MessageToDevice = {
+ this.userId = Some(userId)
+ this
+ }
+
+ /** Replace the current set of properties with a new set
+ *
+ * @param properties Set of properties
+ *
+ * @return The object for method chaining
+ */
+ def properties(properties: Map[String, String]): MessageToDevice = {
+ this.properties = Option(properties)
+ this
+ }
+
+ /** Add a new property to the message
+ *
+ * @param name Property name
+ * @param value Property value
+ *
+ * @return The object for method chaining
+ */
+ def addProperty(name: String, value: String): MessageToDevice = {
+ if (properties == None) {
+ properties = Some(Map[String, String]())
+ }
+ this.properties.get += ((name, value))
+ this
+ }
+
+ /** Returns a message ready to be sent to a device
+ *
+ * @return Message for the device
+ */
+ def message: Message = {
+ val message = new Message(content)
+ message.setTo(deviceId)
+
+ message.setDeliveryAcknowledgement(ack)
+ message.setMessageId(java.util.UUID.randomUUID().toString())
+ if (correlationId != None) message.setCorrelationId(correlationId.get)
+ if (expiry != None) message.setExpiryTimeUtc(expiry.get)
+ if (userId != None) message.setUserId(userId.get)
+ if (properties != None) message.setProperties(properties.get.asJava)
+
+ message
+ }
+}
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/MethodOnDevice.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/MethodOnDevice.scala
new file mode 100644
index 0000000..b54a929
--- /dev/null
+++ b/src/main/scala/com/microsoft/azure/iot/iothubreact/MethodOnDevice.scala
@@ -0,0 +1,9 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package com.microsoft.azure.iot.iothubreact
+
+/** Method to invoke on the device
+ */
+class MethodOnDevice {
+
+}
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/Offset.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/Offset.scala
deleted file mode 100644
index 36a2202..0000000
--- a/src/main/scala/com/microsoft/azure/iot/iothubreact/Offset.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-package com.microsoft.azure.iot.iothubreact
-
-object Offset {
- def apply(value: String) = new Offset(value)
-}
-
-/** Class used to pass the starting point to IoTHub storage
- *
- * @param value The offset value
- */
-class Offset(val value: String) {
-
-}
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/ResumeOnError.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/ResumeOnError.scala
index 6da1b7a..8f1526f 100644
--- a/src/main/scala/com/microsoft/azure/iot/iothubreact/ResumeOnError.scala
+++ b/src/main/scala/com/microsoft/azure/iot/iothubreact/ResumeOnError.scala
@@ -6,11 +6,12 @@ import akka.actor.ActorSystem
import akka.stream.{ActorMaterializer, ActorMaterializerSettings, Supervision}
/** Akka streaming settings to resume the stream in case of errors
- *
- * @todo Review the usage of a supervisor with Akka streams
*/
case object ResumeOnError extends Logger {
+ // TODO: Revisit the usage of a supervisor with Akka streams
+ // TODO: Try to remove the logger and save threads, or reuse the existing event stream
+
private[this] val decider: Supervision.Decider = {
case e: Exception ⇒ {
log.error(e, e.getMessage)
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/StopOnError.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/StopOnError.scala
index cdc51b5..78a32ed 100644
--- a/src/main/scala/com/microsoft/azure/iot/iothubreact/StopOnError.scala
+++ b/src/main/scala/com/microsoft/azure/iot/iothubreact/StopOnError.scala
@@ -6,11 +6,12 @@ import akka.actor.ActorSystem
import akka.stream.{ActorMaterializer, ActorMaterializerSettings, Supervision}
/** Akka streaming settings to stop the stream in case of errors
- *
- * @todo Review the usage of a supervisor with Akka streams
*/
case object StopOnError extends Logger {
+ // TODO: Review the usage of a supervisor with Akka streams
+ // TODO: Try to remove the logger and save threads, or reuse the existing event stream
+
private[this] val decider: Supervision.Decider = {
case e: Exception ⇒ {
log.error(e, e.getMessage)
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/StreamManager.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/StreamManager.scala
new file mode 100644
index 0000000..7ca05e8
--- /dev/null
+++ b/src/main/scala/com/microsoft/azure/iot/iothubreact/StreamManager.scala
@@ -0,0 +1,37 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package com.microsoft.azure.iot.iothubreact
+
+import akka.stream.stage.{GraphStage, GraphStageLogic, InHandler, OutHandler}
+import akka.stream.{Attributes, FlowShape, Inlet, Outlet}
+
+private[iothubreact] class StreamManager[A]
+ extends GraphStage[FlowShape[A, A]] {
+
+ private[this] val in = Inlet[A]("StreamCanceller.Flow.in")
+ private[this] val out = Outlet[A]("StreamCanceller.Flow.out")
+ private[this] var closeSignal = false
+
+ override val shape = FlowShape.of(in, out)
+
+ def close(): Unit = closeSignal = true
+
+ override def createLogic(attr: Attributes): GraphStageLogic = {
+ new GraphStageLogic(shape) {
+
+ setHandler(in, new InHandler {
+ override def onPush(): Unit = push(out, grab(in))
+ })
+
+ setHandler(out, new OutHandler {
+ override def onPull(): Unit = {
+ if (closeSignal) {
+ cancel(in)
+ } else {
+ pull(in)
+ }
+ }
+ })
+ }
+ }
+}
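The stage above is a plain pass-through flow until `close()` is called; on the next pull it cancels the upstream, which is how the new `IoTHub.close()` below stops the merged stream. A minimal sketch of wiring such a stage into a stream follows (it must live in the `com.microsoft.azure.iot.iothubreact` package because the class is package-private; the numeric source is illustrative):

```scala
package com.microsoft.azure.iot.iothubreact

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Sink, Source}

object StreamManagerSketch extends App {
  implicit val system       = ActorSystem("sketch")
  implicit val materializer = ActorMaterializer()

  val manager = new StreamManager[Int]

  // Elements pass through untouched while the close signal is false
  Source(1 to Int.MaxValue).via(manager).runWith(Sink.foreach(println))

  Thread.sleep(1000)
  manager.close() // the stage cancels its upstream on the next pull
}
```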
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/TypedSink.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/TypedSink.scala
new file mode 100644
index 0000000..6738617
--- /dev/null
+++ b/src/main/scala/com/microsoft/azure/iot/iothubreact/TypedSink.scala
@@ -0,0 +1,47 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package com.microsoft.azure.iot.iothubreact
+
+import java.util.concurrent.CompletionStage
+
+import akka.Done
+import akka.stream.javadsl.{Sink ⇒ JavaSink}
+import akka.stream.scaladsl.{Sink ⇒ ScalaSink}
+import com.microsoft.azure.iot.iothubreact.sinks._
+
+import scala.concurrent.Future
+
+/** Type class to support the different kinds of communication through IoTHub
+ *
+ * @tparam A Type of communication (MessageToDevice, MethodOnDevice or DeviceProperties)
+ */
+trait TypedSink[A] {
+ def scalaDefinition: ScalaSink[A, Future[Done]]
+
+ def javaDefinition: JavaSink[A, CompletionStage[Done]]
+}
+
+/** Type class implementations for MessageToDevice, MethodOnDevice, DeviceProperties
+ * Automatically selects the appropriate sink depending on type of communication.
+ */
+object TypedSink {
+
+ implicit object MessageToDeviceSinkDef extends TypedSink[MessageToDevice] {
+ override def scalaDefinition: ScalaSink[MessageToDevice, Future[Done]] = MessageToDeviceSink().scalaSink()
+
+ override def javaDefinition: JavaSink[MessageToDevice, CompletionStage[Done]] = MessageToDeviceSink().javaSink()
+ }
+
+ implicit object MethodOnDeviceSinkDef extends TypedSink[MethodOnDevice] {
+ override def scalaDefinition: ScalaSink[MethodOnDevice, Future[Done]] = MethodOnDeviceSink().scalaSink()
+
+ override def javaDefinition: JavaSink[MethodOnDevice, CompletionStage[Done]] = MethodOnDeviceSink().javaSink()
+ }
+
+ implicit object DevicePropertiesSinkDef extends TypedSink[DeviceProperties] {
+ override def scalaDefinition: ScalaSink[DeviceProperties, Future[Done]] = DevicePropertiesSink().scalaSink()
+
+ override def javaDefinition: JavaSink[DeviceProperties, CompletionStage[Done]] = DevicePropertiesSink().javaSink()
+ }
+
+}
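Because the instances above are implicit objects, the element type alone lets the compiler pick the matching sink; a hedged sketch of resolving one explicitly and through the `sink[A]()` helper added to the Scala DSL later in this patch:

```scala
import akka.Done
import akka.stream.scaladsl.Sink
import com.microsoft.azure.iot.iothubreact.{MessageToDevice, TypedSink}
import com.microsoft.azure.iot.iothubreact.scaladsl.IoTHub

import scala.concurrent.Future

object TypedSinkSketch {
  // Explicit resolution: the compiler selects MessageToDeviceSinkDef from the companion object
  val resolved: TypedSink[MessageToDevice] = implicitly[TypedSink[MessageToDevice]]

  // Through the DSL: equivalent to IoTHub().messageSink
  val sink: Sink[MessageToDevice, Future[Done]] = IoTHub().sink[MessageToDevice]()
}
```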
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/checkpointing/Backends/CheckpointBackend.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/checkpointing/Backends/CheckpointBackend.scala
index 208a151..d17aed3 100644
--- a/src/main/scala/com/microsoft/azure/iot/iothubreact/checkpointing/Backends/CheckpointBackend.scala
+++ b/src/main/scala/com/microsoft/azure/iot/iothubreact/checkpointing/Backends/CheckpointBackend.scala
@@ -4,7 +4,7 @@ package com.microsoft.azure.iot.iothubreact.checkpointing.backends
import com.microsoft.azure.iot.iothubreact.checkpointing.Configuration
-private[iothubreact] trait CheckpointBackend {
+trait CheckpointBackend {
def checkpointNamespace: String = Configuration.storageNamespace
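Making the trait public opens checkpoint storage to applications, in line with the "Support plugins" TODO further down; below is a minimal sketch of a custom backend (the method signatures follow the compatibility test added later in this patch, and the in-memory map is illustrative only, since positions would be lost on restart):

```scala
import com.microsoft.azure.iot.iothubreact.checkpointing.backends.CheckpointBackend
import com.microsoft.azure.iot.iothubreact.scaladsl.IoTHubPartition

import scala.collection.concurrent.TrieMap

class InMemoryCheckpointBackend extends CheckpointBackend {

  private[this] val offsets = TrieMap[Int, String]()

  // Return the saved position, or the "not found" marker defined by IoTHubPartition
  override def readOffset(partition: Int): String =
    offsets.getOrElse(partition, IoTHubPartition.OffsetCheckpointNotFound)

  override def writeOffset(partition: Int, offset: String): Unit =
    offsets.update(partition, offset)
}
```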
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/checkpointing/CheckpointService.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/checkpointing/CheckpointService.scala
index 6040918..7d3fc76 100644
--- a/src/main/scala/com/microsoft/azure/iot/iothubreact/checkpointing/CheckpointService.scala
+++ b/src/main/scala/com/microsoft/azure/iot/iothubreact/checkpointing/CheckpointService.scala
@@ -42,18 +42,12 @@ private[iothubreact] class CheckpointService(partition: Int)
.fromExecutorService(Executors.newFixedThreadPool(sys.runtime.availableProcessors))
// Contains the offsets up to one hour ago, max 1 offset per second (max size = 3600)
- private[this] val queue = new scala.collection.mutable.Queue[OffsetsData]
+ private[this] val queue = new scala.collection.mutable.Queue[OffsetsData]
// Count the offsets tracked in the queue (!= queue.size)
- private[this] var queuedOffsets: Long = 0
- private[this] var currentOffset: String = IoTHubPartition.OffsetStartOfStream
- private[this] val storage = getCheckpointBackend
-
- // Before the actor starts we schedule a recurring storage write
- override def preStart(): Unit = {
- val time = Configuration.checkpointFrequency
- context.system.scheduler.schedule(time, time, self, StoreOffset)
- log.info(s"Scheduled checkpoint for partition ${partition} every ${time.toMillis} ms")
- }
+ private[this] var queuedOffsets : Long = 0
+ private[this] var currentOffset : String = IoTHubPartition.OffsetStartOfStream
+ private[this] val storage = getCheckpointBackend
+ private[this] var schedulerStarted: Boolean = false
override def receive: Receive = notReady
@@ -144,6 +138,14 @@ private[iothubreact] class CheckpointService(partition: Int)
}
def updateOffsetAction(offset: String) = {
+
+ if (!schedulerStarted) {
+ val time = Configuration.checkpointFrequency
+ schedulerStarted = true
+ context.system.scheduler.schedule(time, time, self, StoreOffset)
+ log.info(s"Scheduled checkpoint for partition ${partition} every ${time.toMillis} ms")
+ }
+
if (offset.toLong > currentOffset.toLong) {
val epoch = Instant.now.getEpochSecond
@@ -163,7 +165,7 @@ private[iothubreact] class CheckpointService(partition: Int)
}
}
- // @todo Support plugins
+ // TODO: Support plugins
def getCheckpointBackend: CheckpointBackend = {
val conf = Configuration.checkpointBackendType
conf.toUpperCase match {
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/checkpointing/Configuration.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/checkpointing/Configuration.scala
index 52ad42c..997a9c9 100644
--- a/src/main/scala/com/microsoft/azure/iot/iothubreact/checkpointing/Configuration.scala
+++ b/src/main/scala/com/microsoft/azure/iot/iothubreact/checkpointing/Configuration.scala
@@ -10,13 +10,13 @@ import scala.concurrent.duration._
import scala.language.postfixOps
/** Hold IoT Hub stream checkpointing configuration settings
- *
- * @todo Allow to use multiple configurations, for instance while processing multiple
- * streams a client will need a dedicated checkpoint container for each stream
*/
private[iothubreact] object Configuration {
- private[this] val confPath = "iothub-checkpointing."
+ // TODO: Allow to use multiple configurations, e.g. while processing multiple streams
+ // a client will need a dedicated checkpoint container for each stream
+
+ private[this] val confPath = "iothub-react.checkpointing."
private[this] val conf: Config = ConfigFactory.load()
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/checkpointing/SavePositionOnPull.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/checkpointing/SavePositionOnPull.scala
index 74f54e3..27c2560 100644
--- a/src/main/scala/com/microsoft/azure/iot/iothubreact/checkpointing/SavePositionOnPull.scala
+++ b/src/main/scala/com/microsoft/azure/iot/iothubreact/checkpointing/SavePositionOnPull.scala
@@ -4,7 +4,7 @@ package com.microsoft.azure.iot.iothubreact.checkpointing
import akka.stream.stage.{GraphStage, GraphStageLogic, InHandler, OutHandler}
import akka.stream.{Attributes, FlowShape, Inlet, Outlet}
-import com.microsoft.azure.iot.iothubreact.IoTMessage
+import com.microsoft.azure.iot.iothubreact.MessageFromDevice
import com.microsoft.azure.iot.iothubreact.checkpointing.CheckpointService.UpdateOffset
/** Flow receiving and emitting IoT messages, while keeping note of the last offset seen
@@ -12,10 +12,10 @@ import com.microsoft.azure.iot.iothubreact.checkpointing.CheckpointService.Updat
* @param partition IoT hub partition number
*/
private[iothubreact] class SavePositionOnPull(partition: Int)
- extends GraphStage[FlowShape[IoTMessage, IoTMessage]] {
+ extends GraphStage[FlowShape[MessageFromDevice, MessageFromDevice]] {
- val in = Inlet[IoTMessage]("Checkpoint.Flow.in")
- val out = Outlet[IoTMessage]("Checkpoint.Flow.out")
+ val in = Inlet[MessageFromDevice]("Checkpoint.Flow.in")
+ val out = Outlet[MessageFromDevice]("Checkpoint.Flow.out")
val none = ""
override val shape = FlowShape.of(in, out)
@@ -32,7 +32,7 @@ private[iothubreact] class SavePositionOnPull(partition: Int)
    // when a message enters the stage we save its offset
setHandler(in, new InHandler {
override def onPush(): Unit = {
- val message: IoTMessage = grab(in)
+ val message: MessageFromDevice = grab(in)
if (!message.isKeepAlive) lastOffsetSent = message.offset
push(out, message)
}
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/checkpointing/backends/cassandra/lib/Table.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/checkpointing/backends/cassandra/lib/Table.scala
index 5143c1b..56fccc6 100644
--- a/src/main/scala/com/microsoft/azure/iot/iothubreact/checkpointing/backends/cassandra/lib/Table.scala
+++ b/src/main/scala/com/microsoft/azure/iot/iothubreact/checkpointing/backends/cassandra/lib/Table.scala
@@ -35,14 +35,15 @@ private[iothubreact] case class Table[T <: ToCassandra](session: Session, keyspa
}
/** Retrieve a record
- *
- * @todo return a T object
*
* @param condition CQL condition
*
* @return a record as string
*/
def select(condition: String): JObject = {
+
+ // TODO: return a T object
+
val row = session.execute(s"SELECT * FROM $keyspace.$tableName WHERE ${condition}").one()
var partition = -1
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/filters/Device.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/filters/Device.scala
new file mode 100644
index 0000000..f00a7a6
--- /dev/null
+++ b/src/main/scala/com/microsoft/azure/iot/iothubreact/filters/Device.scala
@@ -0,0 +1,17 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package com.microsoft.azure.iot.iothubreact.filters
+
+import com.microsoft.azure.iot.iothubreact.MessageFromDevice
+
+object Device {
+ def apply(deviceId: String)(m: MessageFromDevice) = new Device(deviceId).only(m)
+}
+
+/** Filter by device ID
+ *
+ * @param deviceId Device ID
+ */
+class Device(val deviceId: String) {
+ def only(m: MessageFromDevice): Boolean = m.deviceId == deviceId
+}
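Thanks to the curried `apply`, the filter can be passed directly wherever a `MessageFromDevice ⇒ Boolean` is expected, the same pattern the compatibility test uses for `MessageType`; a short sketch with an illustrative device ID:

```scala
import com.microsoft.azure.iot.iothubreact.filters.Device
import com.microsoft.azure.iot.iothubreact.scaladsl.IoTHub

object DeviceFilterSketch {
  // Keep only the messages coming from one specific device
  val oneDeviceOnly = IoTHub().source().filter(Device("device1000"))
}
```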
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/filters/Ignore.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/filters/Ignore.scala
index 2bfc837..75296ff 100644
--- a/src/main/scala/com/microsoft/azure/iot/iothubreact/filters/Ignore.scala
+++ b/src/main/scala/com/microsoft/azure/iot/iothubreact/filters/Ignore.scala
@@ -2,16 +2,16 @@
package com.microsoft.azure.iot.iothubreact.filters
-import com.microsoft.azure.iot.iothubreact.IoTMessage
+import com.microsoft.azure.iot.iothubreact.MessageFromDevice
/** Set of filters to ignore IoT traffic
*
*/
private[iothubreact] object Ignore {
- /** Ignore the keep alive signal injected by IoTMessageSource
+ /** Ignore the keep alive signal injected by MessageFromDeviceSource
*
* @return True if the message must be processed
*/
- def keepAlive = (m: IoTMessage) ⇒ !m.isKeepAlive
+ def keepAlive = (m: MessageFromDevice) ⇒ !m.isKeepAlive
}
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/filters/MessageType.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/filters/MessageType.scala
new file mode 100644
index 0000000..d4dc57a
--- /dev/null
+++ b/src/main/scala/com/microsoft/azure/iot/iothubreact/filters/MessageType.scala
@@ -0,0 +1,23 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package com.microsoft.azure.iot.iothubreact.filters
+
+import com.microsoft.azure.iot.iothubreact.MessageFromDevice
+
+object MessageType {
+ def apply(messageType: String)(m: MessageFromDevice) = new MessageType(messageType).filter(m)
+}
+
+/** Filter by message type
+ *
+ * @param messageType Message type
+ */
+class MessageType(val messageType: String) {
+ def filter(m: MessageFromDevice): Boolean = m.messageType == messageType
+}
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/filters/Model.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/filters/Model.scala
deleted file mode 100644
index 5f24c21..0000000
--- a/src/main/scala/com/microsoft/azure/iot/iothubreact/filters/Model.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-package com.microsoft.azure.iot.iothubreact.filters
-
-import com.microsoft.azure.iot.iothubreact.IoTMessage
-
-object Model {
- def apply(model: String)(m: IoTMessage) = new Model(model).only(m)
-}
-
-class Model(val model: String) {
- def only(m: IoTMessage): Boolean = m.model == model
-}
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/javadsl/IoTHub.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/javadsl/IoTHub.scala
index 39910c5..c954630 100644
--- a/src/main/scala/com/microsoft/azure/iot/iothubreact/javadsl/IoTHub.scala
+++ b/src/main/scala/com/microsoft/azure/iot/iothubreact/javadsl/IoTHub.scala
@@ -3,29 +3,68 @@
package com.microsoft.azure.iot.iothubreact.javadsl
import java.time.Instant
+import java.util.concurrent.CompletionStage
-import akka.NotUsed
-import akka.stream.javadsl.{Source ⇒ SourceJavaDSL}
-import com.microsoft.azure.iot.iothubreact.{IoTMessage, Offset}
-import com.microsoft.azure.iot.iothubreact.scaladsl.{IoTHub ⇒ IoTHubScalaDSL}
-
-import scala.collection.JavaConverters._
+import akka.stream.javadsl.{Sink, Source ⇒ JavaSource}
+import akka.{Done, NotUsed}
+import com.microsoft.azure.iot.iothubreact._
+import com.microsoft.azure.iot.iothubreact.scaladsl.{IoTHub ⇒ IoTHubScalaDSL, OffsetList ⇒ OffsetListScalaDSL, PartitionList ⇒ PartitionListScalaDSL}
+import com.microsoft.azure.iot.iothubreact.sinks.{DevicePropertiesSink, MessageToDeviceSink, MethodOnDeviceSink}
/** Provides a streaming source to retrieve messages from Azure IoT Hub
- *
- * @todo (*) Provide ClearCheckpoints() method to clear the state
*/
class IoTHub() {
+ // TODO: Provide ClearCheckpoints() method to clear the state
+
private lazy val iotHub = new IoTHubScalaDSL()
+ /** Stop the stream
+ */
+ def close(): Unit = {
+ iotHub.close()
+ }
+
+ /** Sink to send asynchronous messages to IoT devices
+ *
+ * @return Streaming sink
+ */
+ def messageSink: Sink[MessageToDevice, CompletionStage[Done]] =
+ MessageToDeviceSink().javaSink()
+
+ /** Sink to call synchronous methods on IoT devices
+ *
+ * @return Streaming sink
+ */
+ def methodSink: Sink[MethodOnDevice, CompletionStage[Done]] =
+ MethodOnDeviceSink().javaSink()
+
+ /** Sink to asynchronously set properties on IoT devices
+ *
+ * @return Streaming sink
+ */
+ def propertySink: Sink[DeviceProperties, CompletionStage[Done]] =
+ DevicePropertiesSink().javaSink()
+
/** Stream returning all the messages since the beginning, from all the
* configured partitions.
*
* @return A source of IoT messages
*/
- def source(): SourceJavaDSL[IoTMessage, NotUsed] = {
- new SourceJavaDSL(iotHub.source())
+ def source(): JavaSource[MessageFromDevice, NotUsed] = {
+ new JavaSource(iotHub.source())
+ }
+
+ /** Stream returning all the messages from all the requested partitions.
+ * If checkpointing is enabled, the stream starts from the last position saved;
+ * otherwise it starts from the beginning.
+ *
+ * @param partitions Partitions to process
+ *
+ * @return A source of IoT messages
+ */
+ def source(partitions: PartitionList): JavaSource[MessageFromDevice, NotUsed] = {
+ new JavaSource(iotHub.source(PartitionListScalaDSL(partitions)))
}
/** Stream returning all the messages starting from the given time, from all
@@ -35,8 +74,20 @@ class IoTHub() {
*
* @return A source of IoT messages
*/
- def source(startTime: Instant): SourceJavaDSL[IoTMessage, NotUsed] = {
- new SourceJavaDSL(iotHub.source(startTime))
+ def source(startTime: Instant): JavaSource[MessageFromDevice, NotUsed] = {
+ new JavaSource(iotHub.source(startTime))
+ }
+
+ /** Stream returning all the messages starting from the given time, from all
+ * the requested partitions.
+ *
+ * @param startTime Starting position expressed in time
+ * @param partitions Partitions to process
+ *
+ * @return A source of IoT messages
+ */
+ def source(startTime: Instant, partitions: PartitionList): JavaSource[MessageFromDevice, NotUsed] = {
+ new JavaSource(iotHub.source(startTime, PartitionListScalaDSL(partitions)))
}
/** Stream returning all the messages from all the configured partitions.
@@ -47,8 +98,21 @@ class IoTHub() {
*
* @return A source of IoT messages
*/
- def source(withCheckpoints: Boolean): SourceJavaDSL[IoTMessage, NotUsed] = {
- new SourceJavaDSL(iotHub.source(withCheckpoints))
+ def source(withCheckpoints: java.lang.Boolean): JavaSource[MessageFromDevice, NotUsed] = {
+ new JavaSource(iotHub.source(withCheckpoints))
+ }
+
+ /** Stream returning all the messages from all the requested partitions.
+ * If checkpointing is enabled, the stream starts from the last position saved;
+ * otherwise it starts from the beginning.
+ *
+ * @param withCheckpoints Whether to read/write the stream position (default: true)
+ * @param partitions Partitions to process
+ *
+ * @return A source of IoT messages
+ */
+ def source(withCheckpoints: java.lang.Boolean, partitions: PartitionList): JavaSource[MessageFromDevice, NotUsed] = {
+ new JavaSource(iotHub.source(withCheckpoints, PartitionListScalaDSL(partitions)))
}
/** Stream returning all the messages starting from the given offset, from all
@@ -58,8 +122,20 @@ class IoTHub() {
*
* @return A source of IoT messages
*/
- def source(offsets: java.util.Collection[Offset]): SourceJavaDSL[IoTMessage, NotUsed] = {
- new SourceJavaDSL(iotHub.source(offsets.asScala.toList))
+ def source(offsets: OffsetList): JavaSource[MessageFromDevice, NotUsed] = {
+ new JavaSource(iotHub.source(OffsetListScalaDSL(offsets)))
+ }
+
+ /** Stream returning all the messages starting from the given offset, from all
+ * the requested partitions.
+ *
+ * @param offsets Starting position for all the partitions
+ * @param partitions Partitions to process
+ *
+ * @return A source of IoT messages
+ */
+ def source(offsets: OffsetList, partitions: PartitionList): JavaSource[MessageFromDevice, NotUsed] = {
+ new JavaSource(iotHub.source(OffsetListScalaDSL(offsets), PartitionListScalaDSL(partitions)))
}
/** Stream returning all the messages starting from the given time, from all
@@ -70,8 +146,21 @@ class IoTHub() {
*
* @return A source of IoT messages
*/
- def source(startTime: Instant, withCheckpoints: Boolean): SourceJavaDSL[IoTMessage, NotUsed] = {
- new SourceJavaDSL(iotHub.source(startTime, withCheckpoints))
+ def source(startTime: Instant, withCheckpoints: java.lang.Boolean): JavaSource[MessageFromDevice, NotUsed] = {
+ new JavaSource(iotHub.source(startTime, withCheckpoints))
+ }
+
+ /** Stream returning all the messages starting from the given time, from all
+ * the requested partitions.
+ *
+ * @param startTime Starting position expressed in time
+ * @param withCheckpoints Whether to read/write the stream position (default: true)
+ * @param partitions Partitions to process
+ *
+ * @return A source of IoT messages
+ */
+ def source(startTime: Instant, withCheckpoints: java.lang.Boolean, partitions: PartitionList): JavaSource[MessageFromDevice, NotUsed] = {
+ new JavaSource(iotHub.source(startTime, withCheckpoints, PartitionListScalaDSL(partitions)))
}
/** Stream returning all the messages starting from the given offset, from all
@@ -82,7 +171,20 @@ class IoTHub() {
*
* @return A source of IoT messages
*/
- def source(offsets: java.util.Collection[Offset], withCheckpoints: Boolean): SourceJavaDSL[IoTMessage, NotUsed] = {
- new SourceJavaDSL(iotHub.source(offsets.asScala.toList, withCheckpoints))
+ def source(offsets: OffsetList, withCheckpoints: java.lang.Boolean): JavaSource[MessageFromDevice, NotUsed] = {
+ new JavaSource(iotHub.source(OffsetListScalaDSL(offsets), withCheckpoints))
+ }
+
+ /** Stream returning all the messages starting from the given offset, from all
+ * the requested partitions.
+ *
+ * @param offsets Starting position for all the partitions
+ * @param withCheckpoints Whether to read/write the stream position (default: true)
+ * @param partitions Partitions to process
+ *
+ * @return A source of IoT messages
+ */
+ def source(offsets: OffsetList, withCheckpoints: java.lang.Boolean, partitions: PartitionList): JavaSource[MessageFromDevice, NotUsed] = {
+ new JavaSource(iotHub.source(OffsetListScalaDSL(offsets), withCheckpoints, PartitionListScalaDSL(partitions)))
}
}
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/javadsl/IoTHubPartition.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/javadsl/IoTHubPartition.scala
deleted file mode 100644
index 5c877a5..0000000
--- a/src/main/scala/com/microsoft/azure/iot/iothubreact/javadsl/IoTHubPartition.scala
+++ /dev/null
@@ -1,94 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-package com.microsoft.azure.iot.iothubreact.javadsl
-
-import java.time.Instant
-
-import akka.NotUsed
-import akka.stream.javadsl.{Source ⇒ SourceJavaDSL}
-import com.microsoft.azure.iot.iothubreact.{IoTMessage, Offset}
-import com.microsoft.azure.iot.iothubreact.scaladsl.{IoTHubPartition ⇒ IoTHubPartitionScalaDSL}
-
-/** Provides a streaming source to retrieve messages from one Azure IoT Hub partition
- *
- * @param partition IoT hub partition number (0-based). The number of
- * partitions is set during the deployment.
- *
- * @todo (*) Provide ClearCheckpoints() method to clear the state
- * @todo Support reading the same partition from multiple clients
- */
-class IoTHubPartition(val partition: Int) {
-
- // Offset used to start reading from the beginning
- final val OffsetStartOfStream: String = IoTHubPartitionScalaDSL.OffsetStartOfStream
-
- // Public constant: used internally to signal when there is no position saved in the storage
- // To be used by custom backend implementations
- final val OffsetCheckpointNotFound: String = IoTHubPartitionScalaDSL.OffsetCheckpointNotFound
-
- private lazy val iotHubPartition = new IoTHubPartitionScalaDSL(partition)
-
- /** Stream returning all the messages since the beginning, from the specified
- * partition.
- *
- * @return A source of IoT messages
- */
- def source(): SourceJavaDSL[IoTMessage, NotUsed] = {
- new SourceJavaDSL(iotHubPartition.source())
- }
-
- /** Stream returning all the messages from the given offset, from the
- * specified partition.
- *
- * @param startTime Starting position expressed in time
- *
- * @return A source of IoT messages
- */
- def source(startTime: Instant): SourceJavaDSL[IoTMessage, NotUsed] = {
- new SourceJavaDSL(iotHubPartition.source(startTime))
- }
-
- /** Stream returning all the messages. If checkpointing, the stream starts from the last position
- * saved, otherwise it starts from the beginning.
- *
- * @param withCheckpoints Whether to read/write the stream position (default: true)
- *
- * @return A source of IoT messages
- */
- def source(withCheckpoints: Boolean): SourceJavaDSL[IoTMessage, NotUsed] = {
- new SourceJavaDSL(iotHubPartition.source(withCheckpoints))
- }
-
- /** Stream returning all the messages from the given offset, from the
- * specified partition.
- *
- * @param offset Starting position, offset of the first message
- *
- * @return A source of IoT messages
- */
- def source(offset: Offset): SourceJavaDSL[IoTMessage, NotUsed] = {
- new SourceJavaDSL(iotHubPartition.source(offset))
- }
-
- /** Stream returning all the messages from the given offset
- *
- * @param startTime Starting position expressed in time
- * @param withCheckpoints Whether to read/write the stream position (default: true)
- *
- * @return A source of IoT messages
- */
- def source(startTime: Instant, withCheckpoints: Boolean): SourceJavaDSL[IoTMessage, NotUsed] = {
- new SourceJavaDSL(iotHubPartition.source(startTime, withCheckpoints))
- }
-
- /** Stream returning all the messages from the given offset
- *
- * @param offset Starting position, offset of the first message
- * @param withCheckpoints Whether to read/write the stream position (default: true)
- *
- * @return A source of IoT messages
- */
- def source(offset: Offset, withCheckpoints: Boolean): SourceJavaDSL[IoTMessage, NotUsed] = {
- new SourceJavaDSL(iotHubPartition.source(offset, withCheckpoints))
- }
-}
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/javadsl/OffsetList.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/javadsl/OffsetList.scala
new file mode 100644
index 0000000..20ce826
--- /dev/null
+++ b/src/main/scala/com/microsoft/azure/iot/iothubreact/javadsl/OffsetList.scala
@@ -0,0 +1,11 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package com.microsoft.azure.iot.iothubreact.javadsl
+
+/** A list of Offsets (type erasure workaround)
+ *
+ * @param values List of offsets
+ */
+class OffsetList(val values: java.util.List[java.lang.String]) {
+
+}
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/javadsl/PartitionList.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/javadsl/PartitionList.scala
new file mode 100644
index 0000000..bc1281c
--- /dev/null
+++ b/src/main/scala/com/microsoft/azure/iot/iothubreact/javadsl/PartitionList.scala
@@ -0,0 +1,11 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package com.microsoft.azure.iot.iothubreact.javadsl
+
+/** A list of Partition IDs (type erasure workaround)
+ *
+ * @param values List of partition IDs
+ */
+class PartitionList(val values: java.util.List[java.lang.Integer]) {
+
+}
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/scaladsl/IoTHub.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/scaladsl/IoTHub.scala
index 7ddc83d..8e54e26 100644
--- a/src/main/scala/com/microsoft/azure/iot/iothubreact/scaladsl/IoTHub.scala
+++ b/src/main/scala/com/microsoft/azure/iot/iothubreact/scaladsl/IoTHub.scala
@@ -4,26 +4,64 @@ package com.microsoft.azure.iot.iothubreact.scaladsl
import java.time.Instant
-import akka.NotUsed
-import akka.stream.SourceShape
+import akka.stream._
import akka.stream.scaladsl._
+import akka.{Done, NotUsed}
import com.microsoft.azure.iot.iothubreact._
import com.microsoft.azure.iot.iothubreact.checkpointing.{Configuration ⇒ CPConfiguration}
+import com.microsoft.azure.iot.iothubreact.sinks.{DevicePropertiesSink, MessageToDeviceSink, MethodOnDeviceSink}
+import scala.concurrent.Future
import scala.language.postfixOps
-object IoTHub {
- def apply(): IoTHub = new IoTHub
-}
-
/** Provides a streaming source to retrieve messages from Azure IoT Hub
- *
- * @todo (*) Provide ClearCheckpoints() method to clear the state
*/
-class IoTHub extends Logger {
+case class IoTHub() extends Logger {
+
+ // TODO: Provide ClearCheckpoints() method to clear the state
+
+ private[this] val streamManager = new StreamManager[MessageFromDevice]
+
+ private[this] def allPartitions = Some(PartitionList(0 until Configuration.iotHubPartitions))
private[this] def fromStart =
- Some(List.fill[Offset](Configuration.iotHubPartitions)(Offset(IoTHubPartition.OffsetStartOfStream)))
+ Some(OffsetList(List.fill[String](Configuration.iotHubPartitions)(IoTHubPartition.OffsetStartOfStream)))
+
+ /** Stop the stream
+ */
+ def close(): Unit = {
+ streamManager.close()
+ }
+
+ /** Sink to communicate with IoT devices
+ *
+ * @param typedSink Sink factory
+ * @tparam A Type of communication (message, method, property)
+ *
+ * @return Streaming sink
+ */
+ def sink[A]()(implicit typedSink: TypedSink[A]): Sink[A, Future[Done]] = typedSink.scalaDefinition
+
+ /** Sink to send asynchronous messages to IoT devices
+ *
+ * @return Streaming sink
+ */
+ def messageSink: Sink[MessageToDevice, Future[Done]] =
+ MessageToDeviceSink().scalaSink()
+
+ /** Sink to call synchronous methods on IoT devices
+ *
+ * @return Streaming sink
+ */
+ def methodSink: Sink[MethodOnDevice, Future[Done]] =
+ MethodOnDeviceSink().scalaSink()
+
+ /** Sink to asynchronously set properties on IoT devices
+ *
+ * @return Streaming sink
+ */
+ def propertySink: Sink[DeviceProperties, Future[Done]] =
+ DevicePropertiesSink().scalaSink()
/** Stream returning all the messages from all the configured partitions.
* If checkpointing the stream starts from the last position saved, otherwise
@@ -31,9 +69,26 @@ class IoTHub extends Logger {
*
* @return A source of IoT messages
*/
- def source(): Source[IoTMessage, NotUsed] = {
+ def source(): Source[MessageFromDevice, NotUsed] = {
getSource(
withTimeOffset = false,
+ partitions = allPartitions,
+ offsets = fromStart,
+ withCheckpoints = false)
+ }
+
+ /** Stream returning all the messages from all the requested partitions.
+ * If checkpointing is enabled, the stream starts from the last position saved;
+ * otherwise it starts from the beginning.
+ *
+ * @param partitions Partitions to process
+ *
+ * @return A source of IoT messages
+ */
+ def source(partitions: PartitionList): Source[MessageFromDevice, NotUsed] = {
+ getSource(
+ withTimeOffset = false,
+ partitions = Some(partitions),
offsets = fromStart,
withCheckpoints = false)
}
@@ -45,9 +100,26 @@ class IoTHub extends Logger {
*
* @return A source of IoT messages
*/
- def source(startTime: Instant): Source[IoTMessage, NotUsed] = {
+ def source(startTime: Instant): Source[MessageFromDevice, NotUsed] = {
getSource(
withTimeOffset = true,
+ partitions = allPartitions,
+ startTime = startTime,
+ withCheckpoints = false)
+ }
+
+ /** Stream returning all the messages starting from the given time, from all
+ * the requested partitions.
+ *
+ * @param startTime Starting position expressed in time
+ * @param partitions Partitions to process
+ *
+ * @return A source of IoT messages
+ */
+ def source(startTime: Instant, partitions: PartitionList): Source[MessageFromDevice, NotUsed] = {
+ getSource(
+ withTimeOffset = true,
+ partitions = Some(partitions),
startTime = startTime,
withCheckpoints = false)
}
@@ -60,9 +132,27 @@ class IoTHub extends Logger {
*
* @return A source of IoT messages
*/
- def source(withCheckpoints: Boolean): Source[IoTMessage, NotUsed] = {
+ def source(withCheckpoints: Boolean): Source[MessageFromDevice, NotUsed] = {
getSource(
withTimeOffset = false,
+ partitions = allPartitions,
+ offsets = fromStart,
+ withCheckpoints = withCheckpoints && CPConfiguration.isEnabled)
+ }
+
+ /** Stream returning all the messages from all the requested partitions.
+ * If checkpointing is enabled, the stream starts from the last position saved;
+ * otherwise it starts from the beginning.
+ *
+ * @param withCheckpoints Whether to read/write the stream position (default: true)
+ * @param partitions Partitions to process
+ *
+ * @return A source of IoT messages
+ */
+ def source(withCheckpoints: Boolean, partitions: PartitionList): Source[MessageFromDevice, NotUsed] = {
+ getSource(
+ withTimeOffset = false,
+ partitions = Some(partitions),
offsets = fromStart,
withCheckpoints = withCheckpoints && CPConfiguration.isEnabled)
}
@@ -74,9 +164,26 @@ class IoTHub extends Logger {
*
* @return A source of IoT messages
*/
- def source(offsets: List[Offset]): Source[IoTMessage, NotUsed] = {
+ def source(offsets: OffsetList): Source[MessageFromDevice, NotUsed] = {
getSource(
withTimeOffset = false,
+ partitions = allPartitions,
+ offsets = Some(offsets),
+ withCheckpoints = false)
+ }
+
+ /** Stream returning all the messages starting from the given offset, from all
+ * the requested partitions.
+ *
+ * @param offsets Starting position for all the partitions
+ * @param partitions Partitions to process
+ *
+ * @return A source of IoT messages
+ */
+ def source(offsets: OffsetList, partitions: PartitionList): Source[MessageFromDevice, NotUsed] = {
+ getSource(
+ withTimeOffset = false,
+ partitions = Some(partitions),
offsets = Some(offsets),
withCheckpoints = false)
}
@@ -89,9 +196,27 @@ class IoTHub extends Logger {
*
* @return A source of IoT messages
*/
- def source(startTime: Instant, withCheckpoints: Boolean): Source[IoTMessage, NotUsed] = {
+ def source(startTime: Instant, withCheckpoints: Boolean): Source[MessageFromDevice, NotUsed] = {
getSource(
withTimeOffset = true,
+ partitions = allPartitions,
+ startTime = startTime,
+ withCheckpoints = withCheckpoints && CPConfiguration.isEnabled)
+ }
+
+ /** Stream returning all the messages starting from the given time, from all
+ * the requested partitions.
+ *
+ * @param startTime Starting position expressed in time
+ * @param withCheckpoints Whether to read/write the stream position (default: true)
+ * @param partitions Partitions to process
+ *
+ * @return A source of IoT messages
+ */
+ def source(startTime: Instant, withCheckpoints: Boolean, partitions: PartitionList): Source[MessageFromDevice, NotUsed] = {
+ getSource(
+ withTimeOffset = true,
+ partitions = Some(partitions),
startTime = startTime,
withCheckpoints = withCheckpoints && CPConfiguration.isEnabled)
}
@@ -104,9 +229,27 @@ class IoTHub extends Logger {
*
* @return A source of IoT messages
*/
- def source(offsets: List[Offset], withCheckpoints: Boolean): Source[IoTMessage, NotUsed] = {
+ def source(offsets: OffsetList, withCheckpoints: Boolean): Source[MessageFromDevice, NotUsed] = {
getSource(
withTimeOffset = false,
+ partitions = allPartitions,
+ offsets = Some(offsets),
+ withCheckpoints = withCheckpoints && CPConfiguration.isEnabled)
+ }
+
+ /** Stream returning all the messages starting from the given offset, from all
+ * the requested partitions.
+ *
+ * @param offsets Starting position for all the partitions
+ * @param withCheckpoints Whether to read/write the stream position (default: true)
+ * @param partitions Partitions to process
+ *
+ * @return A source of IoT messages
+ */
+ def source(offsets: OffsetList, withCheckpoints: Boolean, partitions: PartitionList): Source[MessageFromDevice, NotUsed] = {
+ getSource(
+ withTimeOffset = false,
+ partitions = Some(partitions),
offsets = Some(offsets),
withCheckpoints = withCheckpoints && CPConfiguration.isEnabled)
}
@@ -114,6 +257,7 @@ class IoTHub extends Logger {
/** Stream returning all the messages, from the given starting point, optionally with
* checkpointing
*
+ * @param partitions Partitions to process
* @param offsets Starting positions using the offset property in the messages
* @param startTime Starting position expressed in time
* @param withTimeOffset Whether the start point is a timestamp
@@ -122,22 +266,23 @@ class IoTHub extends Logger {
* @return A source of IoT messages
*/
private[this] def getSource(
- offsets: Option[List[Offset]] = None,
+ partitions: Option[PartitionList] = None,
+ offsets: Option[OffsetList] = None,
startTime: Instant = Instant.MIN,
withTimeOffset: Boolean = false,
- withCheckpoints: Boolean = true): Source[IoTMessage, NotUsed] = {
+ withCheckpoints: Boolean = true): Source[MessageFromDevice, NotUsed] = {
val graph = GraphDSL.create() {
implicit b ⇒
import GraphDSL.Implicits._
- val merge = b.add(Merge[IoTMessage](Configuration.iotHubPartitions))
+ val merge = b.add(Merge[MessageFromDevice](partitions.get.values.size))
- for (partition ← 0 until Configuration.iotHubPartitions) {
+ for (partition ← partitions.get.values) {
val graph = if (withTimeOffset)
- IoTHubPartition(partition).source(startTime, withCheckpoints)
+ IoTHubPartition(partition).source(startTime, withCheckpoints).via(streamManager)
else
- IoTHubPartition(partition).source(offsets.get(partition), withCheckpoints)
+ IoTHubPartition(partition).source(offsets.get.values(partition), withCheckpoints).via(streamManager)
val source = Source.fromGraph(graph).async
source ~> merge
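Putting the new Scala DSL pieces together, a hedged end-to-end sketch: read from two partitions starting from now and acknowledge each device through the message sink. The partition numbers and the reply text are illustrative, and it assumes, as in the earlier sketch, that `MessageToDevice` can be built from the message text:

```scala
import java.time.Instant

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import com.microsoft.azure.iot.iothubreact.MessageToDevice
import com.microsoft.azure.iot.iothubreact.scaladsl.{IoTHub, PartitionList}

object EndToEndSketch extends App {
  implicit val system       = ActorSystem("sketch")
  implicit val materializer = ActorMaterializer()

  val hub = IoTHub()

  // Read only partitions 0 and 1, starting from the current time, and reply to each device
  hub.source(startTime = Instant.now(), partitions = PartitionList(Seq(0, 1)))
    .map(m ⇒ MessageToDevice("ack").to(m.deviceId))
    .runWith(hub.messageSink)

  // Stop the stream when done, e.g. on shutdown
  sys.addShutdownHook(hub.close())
}
```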
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/scaladsl/IoTHubPartition.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/scaladsl/IoTHubPartition.scala
index 2709628..f12c1f8 100644
--- a/src/main/scala/com/microsoft/azure/iot/iothubreact/scaladsl/IoTHubPartition.scala
+++ b/src/main/scala/com/microsoft/azure/iot/iothubreact/scaladsl/IoTHubPartition.scala
@@ -26,77 +26,14 @@ object IoTHubPartition extends Logger {
// Public constant: used internally to signal when there is no position saved in the storage
// To be used by custom backend implementations
final val OffsetCheckpointNotFound: String = "{offset checkpoint not found}"
-
- /** Create a streaming source to retrieve messages from one Azure IoT Hub partition
- *
- * @param partition IoT hub partition number
- *
- * @return IoT hub instance
- */
- def apply(partition: Int): IoTHubPartition = new IoTHubPartition(partition)
}
/** Provide a streaming source to retrieve messages from one Azure IoT Hub partition
*
* @param partition IoT hub partition number (0-based). The number of
* partitions is set during the deployment.
- *
- * @todo (*) Provide ClearCheckpoints() method to clear the state
- * @todo Support reading the same partition from multiple clients
*/
-class IoTHubPartition(val partition: Int) extends Logger {
-
- /** Stream returning all the messages. If checkpointing is enabled in the global configuration
- * then the stream starts from the last position saved, otherwise it starts from the beginning.
- *
- * @return A source of IoT messages
- */
- def source(): Source[IoTMessage, NotUsed] = {
- getSource(
- withTimeOffset = false,
- offset = Offset(IoTHubPartition.OffsetStartOfStream),
- withCheckpoints = false)
- }
-
- /** Stream returning all the messages from the given offset
- *
- * @param startTime Starting position expressed in time
- *
- * @return A source of IoT messages
- */
- def source(startTime: Instant): Source[IoTMessage, NotUsed] = {
- getSource(
- withTimeOffset = true,
- startTime = startTime,
- withCheckpoints = false)
- }
-
- /** Stream returning all the messages. If checkpointing, the stream starts from the last position
- * saved, otherwise it starts from the beginning.
- *
- * @param withCheckpoints Whether to read/write the stream position (default: true)
- *
- * @return A source of IoT messages
- */
- def source(withCheckpoints: Boolean): Source[IoTMessage, NotUsed] = {
- getSource(
- withTimeOffset = false,
- offset = Offset(IoTHubPartition.OffsetStartOfStream),
- withCheckpoints = withCheckpoints && CPConfiguration.isEnabled)
- }
-
- /** Stream returning all the messages from the given offset
- *
- * @param offset Starting position, offset of the first message
- *
- * @return A source of IoT messages
- */
- def source(offset: Offset): Source[IoTMessage, NotUsed] = {
- getSource(
- withTimeOffset = false,
- offset = offset,
- withCheckpoints = false)
- }
+private[iothubreact] case class IoTHubPartition(val partition: Int) extends Logger {
/** Stream returning all the messages from the given offset
*
@@ -105,7 +42,7 @@ class IoTHubPartition(val partition: Int) extends Logger {
*
* @return A source of IoT messages
*/
- def source(startTime: Instant, withCheckpoints: Boolean): Source[IoTMessage, NotUsed] = {
+ def source(startTime: Instant, withCheckpoints: Boolean): Source[MessageFromDevice, NotUsed] = {
getSource(
withTimeOffset = true,
startTime = startTime,
@@ -119,7 +56,7 @@ class IoTHubPartition(val partition: Int) extends Logger {
*
* @return A source of IoT messages
*/
- def source(offset: Offset, withCheckpoints: Boolean): Source[IoTMessage, NotUsed] = {
+ def source(offset: String, withCheckpoints: Boolean): Source[MessageFromDevice, NotUsed] = {
getSource(
withTimeOffset = false,
offset = offset,
@@ -138,12 +75,12 @@ class IoTHubPartition(val partition: Int) extends Logger {
*/
private[this] def getSource(
withTimeOffset: Boolean,
- offset: Offset = Offset(""),
+ offset: String = "",
startTime: Instant = Instant.MIN,
- withCheckpoints: Boolean = true): Source[IoTMessage, NotUsed] = {
+ withCheckpoints: Boolean = true): Source[MessageFromDevice, NotUsed] = {
// Load the offset from the storage (if needed)
- var _offset = offset.value
+ var _offset = offset
var _withTimeOffset = withTimeOffset
if (withCheckpoints) {
val savedOffset = GetSavedOffset()
@@ -155,10 +92,10 @@ class IoTHubPartition(val partition: Int) extends Logger {
}
// Build the source starting by time or by offset
- val source: Source[IoTMessage, NotUsed] = if (_withTimeOffset)
- IoTMessageSource(partition, startTime, withCheckpoints).filter(Ignore.keepAlive)
+ val source: Source[MessageFromDevice, NotUsed] = if (_withTimeOffset)
+ MessageFromDeviceSource(partition, startTime, withCheckpoints).filter(Ignore.keepAlive)
else
- IoTMessageSource(partition, _offset, withCheckpoints).filter(Ignore.keepAlive)
+ MessageFromDeviceSource(partition, _offset, withCheckpoints).filter(Ignore.keepAlive)
// Inject a flow to store the stream position after each pull
if (withCheckpoints) {
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/scaladsl/OffsetList.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/scaladsl/OffsetList.scala
new file mode 100644
index 0000000..bd9b591
--- /dev/null
+++ b/src/main/scala/com/microsoft/azure/iot/iothubreact/scaladsl/OffsetList.scala
@@ -0,0 +1,21 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package com.microsoft.azure.iot.iothubreact.scaladsl
+
+import com.microsoft.azure.iot.iothubreact.javadsl.{OffsetList ⇒ JavaOffsetList}
+
+import scala.collection.JavaConverters._
+
+object OffsetList {
+ def apply(values: Seq[String]) = new OffsetList(values)
+
+ def apply(values: JavaOffsetList) = new OffsetList(values.values.asScala)
+}
+
+/** A list of Offsets (type erasure workaround)
+ *
+ * @param values List of offsets
+ */
+class OffsetList(val values: Seq[String]) {
+
+}
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/scaladsl/PartitionList.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/scaladsl/PartitionList.scala
new file mode 100644
index 0000000..f1bf69c
--- /dev/null
+++ b/src/main/scala/com/microsoft/azure/iot/iothubreact/scaladsl/PartitionList.scala
@@ -0,0 +1,21 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package com.microsoft.azure.iot.iothubreact.scaladsl
+
+import com.microsoft.azure.iot.iothubreact.javadsl.{PartitionList ⇒ JavaPartitionList}
+
+import scala.collection.JavaConverters._
+
+object PartitionList {
+ def apply(values: Seq[Int]) = new PartitionList(values)
+
+ def apply(values: JavaPartitionList) = new PartitionList(values.values.asScala.map(_.intValue()))
+}
+
+/** A list of Partition IDs (type erasure workaround)
+ *
+ * @param values List of partition IDs
+ */
+class PartitionList(val values: Seq[Int]) {
+
+}
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/sinks/DevicePropertiesSink.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/sinks/DevicePropertiesSink.scala
new file mode 100644
index 0000000..dc3e061
--- /dev/null
+++ b/src/main/scala/com/microsoft/azure/iot/iothubreact/sinks/DevicePropertiesSink.scala
@@ -0,0 +1,25 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+// TODO: Implement once SDK is ready
+
+package com.microsoft.azure.iot.iothubreact.sinks
+
+import java.util.concurrent.CompletionStage
+
+import akka.Done
+import akka.stream.javadsl.{Sink ⇒ JavaSink}
+import akka.stream.scaladsl.{Sink ⇒ ScalaSink}
+import com.microsoft.azure.iot.iothubreact.{Logger, DeviceProperties}
+
+case class DevicePropertiesSink() extends ISink[DeviceProperties] with Logger {
+
+ throw new NotImplementedError("DevicePropertiesSink is not supported yet")
+
+ def scalaSink(): ScalaSink[DeviceProperties, scala.concurrent.Future[Done]] = {
+ throw new NotImplementedError()
+ }
+
+ def javaSink(): JavaSink[DeviceProperties, CompletionStage[Done]] = {
+ throw new NotImplementedError()
+ }
+}
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/sinks/ISink.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/sinks/ISink.scala
new file mode 100644
index 0000000..fd7acf5
--- /dev/null
+++ b/src/main/scala/com/microsoft/azure/iot/iothubreact/sinks/ISink.scala
@@ -0,0 +1,11 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package com.microsoft.azure.iot.iothubreact.sinks
+
+import akka.Done
+
+trait ISink[A] {
+ def scalaSink(): akka.stream.scaladsl.Sink[A, scala.concurrent.Future[Done]]
+
+ def javaSink(): akka.stream.javadsl.Sink[A, java.util.concurrent.CompletionStage[Done]]
+}
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/sinks/MessageToDeviceSink.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/sinks/MessageToDeviceSink.scala
new file mode 100644
index 0000000..2f6efbf
--- /dev/null
+++ b/src/main/scala/com/microsoft/azure/iot/iothubreact/sinks/MessageToDeviceSink.scala
@@ -0,0 +1,47 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package com.microsoft.azure.iot.iothubreact.sinks
+
+import java.util.concurrent.CompletionStage
+
+import akka.Done
+import akka.japi.function.Procedure
+import akka.stream.javadsl.{Sink ⇒ JavaSink}
+import akka.stream.scaladsl.{Sink ⇒ ScalaSink}
+import com.microsoft.azure.iot.iothubreact.{Configuration, Logger, MessageToDevice}
+import com.microsoft.azure.iot.service.sdk.{IotHubServiceClientProtocol, ServiceClient}
+
+/** Send messages from cloud to devices
+ */
+case class MessageToDeviceSink() extends ISink[MessageToDevice] with Logger {
+
+ private[iothubreact] val protocol = IotHubServiceClientProtocol.AMQPS
+ private[iothubreact] val timeoutMsecs = 15000
+
+ private[this] val connString = s"HostName=${Configuration.accessHostname};SharedAccessKeyName=${Configuration.accessPolicy};SharedAccessKey=${Configuration.accessKey}"
+ private[this] val serviceClient = ServiceClient.createFromConnectionString(connString, protocol)
+
+ private[this] object JavaSinkProcedure extends Procedure[MessageToDevice] {
+ @scala.throws[Exception](classOf[Exception])
+ override def apply(m: MessageToDevice): Unit = {
+ log.info("Sending message to device " + m.deviceId)
+ serviceClient.sendAsync(m.deviceId, m.message)
+ }
+ }
+
+ log.info(s"Connecting client to ${Configuration.accessHostname} ...")
+ serviceClient.open()
+
+ def scalaSink(): ScalaSink[MessageToDevice, scala.concurrent.Future[Done]] =
+ ScalaSink.foreach[MessageToDevice] {
+ m ⇒ {
+ log.info("Sending message to device " + m.deviceId)
+ serviceClient.sendAsync(m.deviceId, m.message)
+ }
+ }
+
+ def javaSink(): JavaSink[MessageToDevice, CompletionStage[Done]] =
+ JavaSink.foreach[MessageToDevice] {
+ JavaSinkProcedure
+ }
+}
diff --git a/src/main/scala/com/microsoft/azure/iot/iothubreact/sinks/MethodOnDeviceSink.scala b/src/main/scala/com/microsoft/azure/iot/iothubreact/sinks/MethodOnDeviceSink.scala
new file mode 100644
index 0000000..9b9c778
--- /dev/null
+++ b/src/main/scala/com/microsoft/azure/iot/iothubreact/sinks/MethodOnDeviceSink.scala
@@ -0,0 +1,25 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+// TODO: Implement once SDK is ready
+
+package com.microsoft.azure.iot.iothubreact.sinks
+
+import java.util.concurrent.CompletionStage
+
+import akka.Done
+import akka.stream.javadsl.{Sink ⇒ JavaSink}
+import akka.stream.scaladsl.{Sink ⇒ ScalaSink}
+import com.microsoft.azure.iot.iothubreact.{Logger, MethodOnDevice}
+
+case class MethodOnDeviceSink() extends ISink[MethodOnDevice] with Logger {
+
+ throw new NotImplementedError("MethodOnDeviceSink is not supported yet")
+
+ def scalaSink(): ScalaSink[MethodOnDevice, scala.concurrent.Future[Done]] = {
+ throw new NotImplementedError()
+ }
+
+ def javaSink(): JavaSink[MethodOnDevice, CompletionStage[Done]] = {
+ throw new NotImplementedError()
+ }
+}
diff --git a/src/test/resources/application.conf b/src/test/resources/application.conf
index ebab877..ff59382 100644
--- a/src/test/resources/application.conf
+++ b/src/test/resources/application.conf
@@ -1,42 +1,54 @@
-iothub {
- partitions = ${?IOTHUB_CI_PARTITIONS}
- name = ${?IOTHUB_CI_NAME}
- namespace = ${?IOTHUB_CI_NAMESPACE}
- keyName = ${?IOTHUB_CI_ACCESS_KEY0_NAME}
- key = ${?IOTHUB_CI_ACCESS_KEY0_VALUE}
- devices = ${?IOTHUB_CI_DEVICES_JSON_FILE}
+akka {
+ # Options: OFF, ERROR, WARNING, INFO, DEBUG
+ loglevel = "DEBUG"
}
-iothub-stream {
- receiverBatchSize = 3
- receiverTimeout = 5s
-}
+iothub-react {
-iothub-checkpointing {
- enabled = true
- frequency = 15s
- countThreshold = 2000
- timeThreshold = 5min
- storage {
- rwTimeout = 6s
- backendType = "AzureBlob"
- namespace = "iothub-react-checkpoints"
+ connection {
+ partitions = ${?IOTHUB_CI_PARTITIONS}
+ name = ${?IOTHUB_CI_NAME}
+ namespace = ${?IOTHUB_CI_NAMESPACE}
+ accessPolicy = ${?IOTHUB_CI_ACCESS_POLICY_0}
+ accessKey = ${?IOTHUB_CI_ACCESS_KEY_0}
+ devices = ${?IOTHUB_CI_DEVICES_JSON_FILE}
- azureblob {
- lease = 15s
- useEmulator = false
- protocol = "https"
- account = ${?IOTHUB_CHECKPOINT_ACCOUNT}
- key = ${?IOTHUB_CHECKPOINT_KEY}
- }
- cassandra {
- cluster = "localhost:9042"
- replicationFactor = 1
+ hubName = ${?IOTHUB_CI_EVENTHUB_NAME}
+ hubEndpoint = ${?IOTHUB_CI_EVENTHUB_ENDPOINT}
+ hubPartitions = ${?IOTHUB_CI_EVENTHUB_PARTITIONS}
+ accessHostName = ${?IOTHUB_CI_ACCESS_HOSTNAME}
+ }
+
+ streaming {
+ consumerGroup = "$Default"
+ receiverBatchSize = 3
+ receiverTimeout = 5s
+ }
+
+ checkpointing {
+ // Leave checkpointing enabled here even when it is not used: tests must turn it off explicitly.
+ enabled = true
+ frequency = 15s
+ countThreshold = 2000
+ timeThreshold = 5min
+ storage {
+ rwTimeout = 6s
+ backendType = "AzureBlob"
+ namespace = "iothub-react-checkpoints"
+
+ azureblob {
+ lease = 15s
+ useEmulator = false
+ protocol = "https"
+ account = ${?IOTHUB_CHECKPOINT_ACCOUNT}
+ key = ${?IOTHUB_CHECKPOINT_KEY}
+ }
+
+ cassandra {
+ cluster = "localhost:9042"
+ replicationFactor = 1
+ }
}
}
}
-
-akka {
- loglevel = "INFO"
-}
diff --git a/src/test/scala/api/API.scala b/src/test/scala/api/API.scala
new file mode 100644
index 0000000..a056d62
--- /dev/null
+++ b/src/test/scala/api/API.scala
@@ -0,0 +1,274 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+// Namespace chosen to avoid access to internal classes
+package api
+
+// No global imports, to make it easier to detect breaking changes
+
+class APIIsBackwardCompatible extends org.scalatest.FeatureSpec {
+
+ info("As a developer using Azure IoT hub React")
+ info("I want to be able to upgrade to new minor versions without changing my code")
+ info("So I can benefit from improvements without excessive development costs")
+
+ feature("Version 0.x is backward compatible") {
+
+ scenario("Using MessageFromDevice") {
+ import com.microsoft.azure.iot.iothubreact.MessageFromDevice
+
+ val data: Option[com.microsoft.azure.eventhubs.EventData] = None
+ val partition: Option[Int] = Some(1)
+
+ // Test properties
+ val message1 = new MessageFromDevice(data, partition)
+ lazy val properties: java.util.Map[String, String] = message1.properties
+ lazy val isKeepAlive: Boolean = message1.isKeepAlive
+ lazy val messageType: String = message1.messageType
+ lazy val contentType: String = message1.contentType
+ lazy val created: java.time.Instant = message1.created
+ lazy val offset: String = message1.offset
+ lazy val sequenceNumber: Long = message1.sequenceNumber
+ lazy val deviceId: String = message1.deviceId
+ lazy val messageId: String = message1.messageId
+ lazy val content: Array[Byte] = message1.content
+ lazy val contentAsString: String = message1.contentAsString
+ assert(message1.isKeepAlive == false)
+
+ // Named parameters
+ val message2: MessageFromDevice = new MessageFromDevice(data = data, partition = partition)
+
+ // Keepalive
+ val message3: MessageFromDevice = new MessageFromDevice(data, None)
+ assert(message3.isKeepAlive == true)
+ }
+
+ scenario("Using Scala DSL OffsetList") {
+ import com.microsoft.azure.iot.iothubreact.scaladsl.OffsetList
+
+ val o1: String = "123"
+ val o2: String = "foo"
+
+ // Ctors
+ val offset1: OffsetList = OffsetList(Seq(o1, o2))
+ val offset2: OffsetList = new OffsetList(Seq(o1, o2))
+
+ // Named parameters
+ val offset3: OffsetList = OffsetList(values = Seq(o1, o2))
+ val offset4: OffsetList = new OffsetList(values = Seq(o1, o2))
+
+ assert(offset1.values(0) == o1)
+ assert(offset1.values(1) == o2)
+ assert(offset2.values(0) == o1)
+ assert(offset2.values(1) == o2)
+ assert(offset3.values(0) == o1)
+ assert(offset3.values(1) == o2)
+ assert(offset4.values(0) == o1)
+ assert(offset4.values(1) == o2)
+ }
+
+ scenario("Using Java DSL OffsetList") {
+ import com.microsoft.azure.iot.iothubreact.javadsl.OffsetList
+
+ val o1: String = "123"
+ val o2: String = "foo"
+
+ // Ctors
+ val offset1: OffsetList = new OffsetList(java.util.Arrays.asList(o1, o2))
+
+ // Named parameters
+ val offset2: OffsetList = new OffsetList(values = java.util.Arrays.asList(o1, o2))
+
+ assert(offset1.values.get(0) == o1)
+ assert(offset1.values.get(1) == o2)
+ assert(offset2.values.get(0) == o1)
+ assert(offset2.values.get(1) == o2)
+ }
+
+ scenario("Using Scala DSL PartitionList") {
+ import com.microsoft.azure.iot.iothubreact.scaladsl.PartitionList
+
+ val o1: Int = 1
+ val o2: Int = 5
+
+ // Ctors
+ val offset1: PartitionList = PartitionList(Seq(o1, o2))
+ val offset2: PartitionList = new PartitionList(Seq(o1, o2))
+
+ // Named parameters
+ val offset3: PartitionList = PartitionList(values = Seq(o1, o2))
+ val offset4: PartitionList = new PartitionList(values = Seq(o1, o2))
+
+ assert(offset1.values(0) == o1)
+ assert(offset1.values(1) == o2)
+ assert(offset2.values(0) == o1)
+ assert(offset2.values(1) == o2)
+ assert(offset3.values(0) == o1)
+ assert(offset3.values(1) == o2)
+ assert(offset4.values(0) == o1)
+ assert(offset4.values(1) == o2)
+ }
+
+ scenario("Using Java DSL PartitionList") {
+ import com.microsoft.azure.iot.iothubreact.javadsl.PartitionList
+
+ val o1: Int = 1
+ val o2: Int = 5
+
+ // Ctors
+ val offset1: PartitionList = new PartitionList(java.util.Arrays.asList(o1, o2))
+
+ // Named parameters
+ val offset2: PartitionList = new PartitionList(values = java.util.Arrays.asList(o1, o2))
+
+ assert(offset1.values.get(0) == o1)
+ assert(offset1.values.get(1) == o2)
+ assert(offset2.values.get(0) == o1)
+ assert(offset2.values.get(1) == o2)
+ }
+
+ scenario("Using ResumeOnError") {
+ import akka.actor.ActorSystem
+ import akka.stream.ActorMaterializer
+ import com.microsoft.azure.iot.iothubreact.ResumeOnError._
+
+ val as: ActorSystem = actorSystem
+ val mat: ActorMaterializer = materializer
+ }
+
+ scenario("Using StopOnError") {
+ import akka.actor.ActorSystem
+ import akka.stream.ActorMaterializer
+ import com.microsoft.azure.iot.iothubreact.StopOnError._
+
+ val as: ActorSystem = actorSystem
+ val mat: ActorMaterializer = materializer
+ }
+
+ scenario("Using CheckpointBackend") {
+ import com.microsoft.azure.iot.iothubreact.checkpointing.backends.CheckpointBackend
+
+ class CustomBackend extends CheckpointBackend {
+ override def readOffset(partition: Int): String = {
+ return ""
+ }
+
+ override def writeOffset(partition: Int, offset: String): Unit = {}
+ }
+
+ val backend: CustomBackend = new CustomBackend()
+ assert(backend.checkpointNamespace == "iothub-react-checkpoints")
+ }
+
+ scenario("Using Message Type") {
+ import com.microsoft.azure.iot.iothubreact.MessageFromDevice
+ import com.microsoft.azure.iot.iothubreact.filters.MessageType
+
+ val filter1: (MessageFromDevice) ⇒ Boolean = MessageType("some")
+ val filter2: MessageType = new MessageType("some")
+ }
+
+ scenario("Using Scala DSL IoTHub") {
+ import java.time.Instant
+
+ import akka.NotUsed
+ import akka.stream.scaladsl.Source
+ import com.microsoft.azure.iot.iothubreact.MessageFromDevice
+ import com.microsoft.azure.iot.iothubreact.scaladsl.{IoTHub, OffsetList, PartitionList}
+
+ val hub1: IoTHub = new IoTHub()
+ val hub2: IoTHub = IoTHub()
+
+ val offsets: OffsetList = OffsetList(Seq("1", "0", "0", "-1", "234623"))
+ val partitions: PartitionList = PartitionList(Seq(0, 1, 3))
+
+ var source: Source[MessageFromDevice, NotUsed] = hub1.source()
+
+ source = hub1.source(partitions)
+ source = hub1.source(partitions = partitions)
+
+ source = hub1.source(Instant.now())
+ source = hub1.source(startTime = Instant.now())
+
+ source = hub1.source(Instant.now(), partitions)
+ source = hub1.source(startTime = Instant.now(), partitions = partitions)
+
+ source = hub1.source(false)
+ source = hub1.source(withCheckpoints = false)
+
+ source = hub1.source(false, partitions)
+ source = hub1.source(withCheckpoints = false, partitions = partitions)
+
+ source = hub1.source(offsets)
+ source = hub1.source(offsets = offsets)
+
+ source = hub1.source(offsets, partitions)
+ source = hub1.source(offsets = offsets, partitions = partitions)
+
+ source = hub1.source(Instant.now(), false)
+ source = hub1.source(startTime = Instant.now(), withCheckpoints = false)
+
+ source = hub1.source(Instant.now(), false, partitions)
+ source = hub1.source(startTime = Instant.now(), withCheckpoints = false, partitions = partitions)
+
+ source = hub1.source(offsets, false)
+ source = hub1.source(offsets = offsets, withCheckpoints = false)
+
+ source = hub1.source(offsets, false, partitions)
+ source = hub1.source(offsets = offsets, withCheckpoints = false, partitions = partitions)
+
+ hub1.close()
+ hub2.close()
+ }
+
+ scenario("Using Java DSL IoTHub") {
+ import java.time.Instant
+
+ import akka.NotUsed
+ import akka.stream.javadsl.Source
+ import com.microsoft.azure.iot.iothubreact.MessageFromDevice
+ import com.microsoft.azure.iot.iothubreact.javadsl.{IoTHub, OffsetList, PartitionList}
+
+ val hub: IoTHub = new IoTHub()
+
+ val offsets: OffsetList = new OffsetList(java.util.Arrays.asList("1", "0", "0", "0", "-1", "234623"))
+ val partitions: PartitionList = new PartitionList(java.util.Arrays.asList(0, 1, 3))
+
+ var source: Source[MessageFromDevice, NotUsed] = hub.source()
+
+ source = hub.source(partitions)
+ source = hub.source(partitions = partitions)
+
+ source = hub.source(Instant.now())
+ source = hub.source(startTime = Instant.now())
+
+ source = hub.source(Instant.now(), partitions)
+ source = hub.source(startTime = Instant.now(), partitions = partitions)
+
+ source = hub.source(false)
+ source = hub.source(withCheckpoints = false)
+
+ source = hub.source(false, partitions)
+ source = hub.source(withCheckpoints = false, partitions = partitions)
+
+ source = hub.source(offsets)
+ source = hub.source(offsets = offsets)
+
+ source = hub.source(offsets, partitions)
+ source = hub.source(offsets = offsets, partitions = partitions)
+
+ source = hub.source(Instant.now(), false)
+ source = hub.source(startTime = Instant.now(), withCheckpoints = false)
+
+ source = hub.source(Instant.now(), false, partitions)
+ source = hub.source(startTime = Instant.now(), withCheckpoints = false, partitions = partitions)
+
+ source = hub.source(offsets, false)
+ source = hub.source(offsets = offsets, withCheckpoints = false)
+
+ source = hub.source(offsets, false, partitions)
+ source = hub.source(offsets = offsets, withCheckpoints = false, partitions = partitions)
+
+ hub.close()
+ }
+ }
+}
diff --git a/src/test/scala/com/microsoft/azure/iot/iothubreact/test/IoTHubReactiveStreamingUserStory.scala b/src/test/scala/com/microsoft/azure/iot/iothubreact/test/IoTHubReactiveStreamingUserStory.scala
deleted file mode 100644
index 9656bc0..0000000
--- a/src/test/scala/com/microsoft/azure/iot/iothubreact/test/IoTHubReactiveStreamingUserStory.scala
+++ /dev/null
@@ -1,192 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-package com.microsoft.azure.iot.iothubreact.test
-
-import java.time.Instant
-
-import akka.NotUsed
-import akka.actor.Props
-import akka.pattern.ask
-import akka.stream.scaladsl.{Sink, Source}
-import com.microsoft.azure.iot.iothubreact.IoTMessage
-import com.microsoft.azure.iot.iothubreact.scaladsl.{IoTHub, IoTHubPartition}
-import com.microsoft.azure.iot.iothubreact.test.helpers._
-import org.scalatest._
-
-import scala.collection.parallel.mutable
-import scala.concurrent.Await
-import scala.concurrent.duration._
-import scala.language.postfixOps
-
-/** Tests streaming against Azure IoT hub endpoint
- *
- * Note: the tests require an actual hub ready to use
- */
-class IoTHubReactiveStreamingUserStory
- extends FeatureSpec
- with GivenWhenThen
- with ReactiveStreaming
- with Logger {
-
- info("As a client of Azure IoT hub")
- info("I want to be able to receive all the messages as a stream")
- info("So I can process them asynchronously and at scale")
-
- val counter = actorSystem.actorOf(Props[Counter], "Counter")
- counter ! "reset"
-
- def readCounter: Long = {
- Await.result(counter.ask("get")(5 seconds), 5 seconds).asInstanceOf[Long]
- }
-
- feature("All IoT messages are presented as an ordered stream") {
-
- scenario("Developer wants to retrieve IoT messages") {
-
- Given("An IoT hub is configured")
- val hub = IoTHub()
- val hubPartition = IoTHubPartition(1)
-
- When("A developer wants to fetch messages from Azure IoT hub")
- val messagesFromOnePartition: Source[IoTMessage, NotUsed] = hubPartition.source(false)
- val messagesFromAllPartitions: Source[IoTMessage, NotUsed] = hub.source(false)
- val messagesFromNowOn: Source[IoTMessage, NotUsed] = hub.source(Instant.now(), false)
-
- Then("The messages are presented as a stream")
- messagesFromOnePartition.to(Sink.ignore)
- messagesFromAllPartitions.to(Sink.ignore)
- messagesFromNowOn.to(Sink.ignore)
- }
-
- scenario("Application wants to retrieve all IoT messages") {
-
- // How many seconds we allow the test to wait for messages from the stream
- val TestTimeout = 60 seconds
- val DevicesCount = 5
- val MessagesPerDevice = 4
- val expectedMessageCount = DevicesCount * MessagesPerDevice
-
- // A label shared by all the messages, to filter out data sent by other tests
- val testRunId: String = "[RetrieveAll-" + java.util.UUID.randomUUID().toString + "]"
-
- // We'll use this as the streaming start date
- val startTime = Instant.now()
- log.info(s"Test run: ${testRunId}, Start time: ${startTime}")
-
- Given("An IoT hub is configured")
- val messages = IoTHub().source(startTime, false)
-
- And(s"${DevicesCount} devices have sent ${MessagesPerDevice} messages each")
- for (i ← 0 until DevicesCount) {
- val device = new Device("device" + (10000 + i))
- for (i ← 1 to MessagesPerDevice) device.sendMessage(testRunId, i)
- device.disconnect()
- }
- log.info(s"Messages sent: $expectedMessageCount")
-
- When("A client application processes messages from the stream")
- counter ! "reset"
- val count = Sink.foreach[IoTMessage] {
- m ⇒ counter ! "inc"
- }
-
- messages
- .filter(m ⇒ m.contentAsString contains testRunId)
- .to(count)
- .run()
-
- Then("Then the client application receives all the messages sent")
- var time = TestTimeout.toMillis.toInt
- val pause = time / 10
- var actualMessageCount = readCounter
- while (time > 0 && actualMessageCount < expectedMessageCount) {
- Thread.sleep(pause)
- time -= pause
- actualMessageCount = readCounter
- log.info(s"Messages received so far: ${actualMessageCount} of ${expectedMessageCount} [Time left ${time / 1000} secs]")
- }
-
- assert(
- actualMessageCount == expectedMessageCount,
- s"Expecting ${expectedMessageCount} messages but received ${actualMessageCount}")
- }
-
- // Note: messages are sent in parallel to obtain some level of mix in the
- // storage, so do not refactor, i.e. don't do one device at a time.
- scenario("Customer needs to process IoT messages in the right order") {
-
- // How many seconds we allow the test to wait for messages from the stream
- val TestTimeout = 120 seconds
- val DevicesCount = 10
- val MessagesPerDevice = 200
- val expectedMessageCount = DevicesCount * MessagesPerDevice
-
- // A label shared by all the messages, to filter out data sent by other tests
- val testRunId: String = "[VerifyOrder-" + java.util.UUID.randomUUID().toString + "]"
-
- // We'll use this as the streaming start date
- val startTime = Instant.now()
- log.info(s"Test run: ${testRunId}, Start time: ${startTime}")
-
- Given("An IoT hub is configured")
- val messages = IoTHub().source(startTime, false)
-
- And(s"${DevicesCount} devices have sent ${MessagesPerDevice} messages each")
- val devices = new collection.mutable.ListMap[Int, Device]()
-
- for (i ← 0 until DevicesCount)
- devices(i) = new Device("device" + (10000 + i))
-
- for (i ← 1 to MessagesPerDevice)
- for (i ← 0 until DevicesCount)
- devices(i).sendMessage(testRunId, i)
-
- for (i ← 0 until DevicesCount)
- devices(i).disconnect()
-
- log.info(s"Messages sent: $expectedMessageCount")
-
- When("A client application processes messages from the stream")
-
- Then("Then the client receives all the messages ordered within each device")
- counter ! "reset"
- val cursors = new mutable.ParHashMap[String, Long]
- val verifier = Sink.foreach[IoTMessage] {
- m ⇒ {
- counter ! "inc"
- log.debug(s"device: ${m.deviceId}, seq: ${m.sequenceNumber} ")
-
- if (!cursors.contains(m.deviceId)) {
- cursors.put(m.deviceId, m.sequenceNumber)
- }
- if (cursors(m.deviceId) > m.sequenceNumber) {
- fail(s"Message out of order. " +
- s"Device ${m.deviceId}, message ${m.sequenceNumber} arrived " +
- s"after message ${cursors(m.deviceId)}")
- }
- cursors.put(m.deviceId, m.sequenceNumber)
- }
- }
-
- messages
- .filter(m ⇒ m.contentAsString contains (testRunId))
- .to(verifier)
- .run()
-
- // Wait till all messages have been verified
- var time = TestTimeout.toMillis.toInt
- val pause = time / 12
- var actualMessageCount = readCounter
- while (time > 0 && actualMessageCount < expectedMessageCount) {
- Thread.sleep(pause)
- time -= pause
- actualMessageCount = readCounter
- log.info(s"Messages received so far: ${actualMessageCount} of ${expectedMessageCount} [Time left ${time / 1000} secs]")
- }
-
- assert(
- actualMessageCount == expectedMessageCount,
- s"Expecting ${expectedMessageCount} messages but received ${actualMessageCount}")
- }
- }
-}
diff --git a/src/test/scala/com/microsoft/azure/iot/iothubreact/test/helpers/Configuration.scala b/src/test/scala/com/microsoft/azure/iot/iothubreact/test/helpers/Configuration.scala
deleted file mode 100644
index 171d028..0000000
--- a/src/test/scala/com/microsoft/azure/iot/iothubreact/test/helpers/Configuration.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-package com.microsoft.azure.iot.iothubreact.test.helpers
-
-import java.nio.file.{Files, Paths}
-
-import com.fasterxml.jackson.databind.ObjectMapper
-import com.fasterxml.jackson.module.scala.DefaultScalaModule
-import com.microsoft.azure.eventhubs.EventHubClient
-import com.typesafe.config.{Config, ConfigFactory}
-
-import scala.reflect.io.File
-
-/* Test configuration settings */
-private object Configuration {
-
- private[this] val conf: Config = ConfigFactory.load()
-
- // Read-only settings
- val iotHubPartitions: Int = conf.getInt("iothub.partitions")
- val iotHubNamespace : String = conf.getString("iothub.namespace")
- val iotHubName : String = conf.getString("iothub.name")
- val iotHubKeyName : String = conf.getString("iothub.keyName")
- val iotHubKey : String = conf.getString("iothub.key")
-
- // Tests can override these
- var iotReceiverConsumerGroup: String = EventHubClient.DEFAULT_CONSUMER_GROUP_NAME
- var receiverTimeout : Long = conf.getDuration("iothub-stream.receiverTimeout").toMillis
- var receiverBatchSize : Int = conf.getInt("iothub-stream.receiverBatchSize")
-
- // Read devices configuration from JSON file
- private[this] val jsonParser = new ObjectMapper()
- jsonParser.registerModule(DefaultScalaModule)
- private[this] lazy val devicesJsonFile = conf.getString("iothub.devices")
- private[this] lazy val devicesJson = File(devicesJsonFile).slurp()
- private[this] lazy val devices = jsonParser.readValue(devicesJson, classOf[Array[DeviceCredentials]])
-
- def deviceCredentials(id: String): DeviceCredentials = devices.find(x ⇒ x.deviceId == id).get
-
- if (!Files.exists(Paths.get(devicesJsonFile))) {
- throw new RuntimeException("Devices credentials not found")
- }
-}
diff --git a/src/test/scala/com/microsoft/azure/iot/iothubreact/test/helpers/Counter.scala b/src/test/scala/com/microsoft/azure/iot/iothubreact/test/helpers/Counter.scala
deleted file mode 100644
index e7bffc0..0000000
--- a/src/test/scala/com/microsoft/azure/iot/iothubreact/test/helpers/Counter.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-package com.microsoft.azure.iot.iothubreact.test.helpers
-
-import java.util.concurrent.Executors
-
-import akka.actor.Actor
-
-import scala.concurrent.ExecutionContext
-
-/* Thread safe counter */
-class Counter extends Actor {
-
- implicit val executionContext = ExecutionContext
- .fromExecutorService(Executors.newFixedThreadPool(sys.runtime.availableProcessors))
-
- private[this] var count: Long = 0
-
- override def receive: Receive = {
- case "reset" ⇒ count = 0
- case "inc" ⇒ count += 1
- case "get" ⇒ sender() ! count
- }
-}
diff --git a/src/test/scala/com/microsoft/azure/iot/iothubreact/test/helpers/ReactiveStreaming.scala b/src/test/scala/com/microsoft/azure/iot/iothubreact/test/helpers/ReactiveStreaming.scala
deleted file mode 100644
index 8e38ef9..0000000
--- a/src/test/scala/com/microsoft/azure/iot/iothubreact/test/helpers/ReactiveStreaming.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-package com.microsoft.azure.iot.iothubreact.test.helpers
-
-import akka.actor.ActorSystem
-import akka.stream.{ActorMaterializer, ActorMaterializerSettings, Supervision}
-
-/** Initialize reactive streaming
- *
- * @todo Don't use supervisor with Akka streams
- */
-trait ReactiveStreaming {
- val decider: Supervision.Decider = {
- case e: Exception ⇒
- println(e.getMessage)
- Supervision.Resume
- }
-
- implicit val actorSystem = ActorSystem("Tests")
- implicit val materializer = ActorMaterializer(ActorMaterializerSettings(actorSystem)
- .withSupervisionStrategy(decider))
-}
diff --git a/src/test/scala/it/AllIoTDeviceMessagesAreDelivered.scala b/src/test/scala/it/AllIoTDeviceMessagesAreDelivered.scala
new file mode 100644
index 0000000..6bc26fa
--- /dev/null
+++ b/src/test/scala/it/AllIoTDeviceMessagesAreDelivered.scala
@@ -0,0 +1,99 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package it
+
+import java.time.Instant
+
+import akka.actor.Props
+import akka.pattern.ask
+import akka.stream.scaladsl.Sink
+import com.microsoft.azure.iot.iothubreact.MessageFromDevice
+import com.microsoft.azure.iot.iothubreact.ResumeOnError._
+import com.microsoft.azure.iot.iothubreact.scaladsl.IoTHub
+import it.helpers.{Counter, Device}
+import org.scalatest._
+
+import scala.concurrent.Await
+import scala.concurrent.duration._
+import scala.language.postfixOps
+
+class AllIoTDeviceMessagesAreDelivered extends FeatureSpec with GivenWhenThen {
+
+ info("As a client of Azure IoT hub")
+ info("I want to be able to receive all device messages")
+ info("So I can process them all")
+
+ // A label shared by all the messages, to filter out data sent by other tests
+ val testRunId: String = s"[${this.getClass.getName}-" + java.util.UUID.randomUUID().toString + "]"
+
+ val counter = actorSystem.actorOf(Props[Counter], this.getClass.getName + "Counter")
+ counter ! "reset"
+
+ def readCounter: Long = {
+ Await.result(counter.ask("get")(5 seconds), 5 seconds).asInstanceOf[Long]
+ }
+
+ feature("All IoT device messages are delivered") {
+
+ scenario("Application wants to retrieve all IoT messages") {
+
+ // How many seconds we allow the test to wait for messages from the stream
+ val TestTimeout = 60 seconds
+ val DevicesCount = 5
+ val MessagesPerDevice = 3
+ val expectedMessageCount = DevicesCount * MessagesPerDevice
+
+ // Create devices
+ val devices = new collection.mutable.ListMap[Int, Device]()
+ for (deviceNumber ← 0 until DevicesCount) devices(deviceNumber) = new Device("device" + (10000 + deviceNumber))
+
+ // We'll use this as the streaming start date
+ val startTime = Instant.now().minusSeconds(30)
+ log.info(s"Test run: ${testRunId}, Start time: ${startTime}")
+
+ Given("An IoT hub is configured")
+ val hub = IoTHub()
+ val messages = hub.source(startTime, false)
+
+ And(s"${DevicesCount} devices have sent ${MessagesPerDevice} messages each")
+ for (msgNumber ← 1 to MessagesPerDevice) {
+ for (deviceNumber ← 0 until DevicesCount) {
+ devices(deviceNumber).sendMessage(testRunId, msgNumber)
+ // Workaround for issue 995
+ if (msgNumber == 1) devices(deviceNumber).waitConfirmation()
+ }
+ for (deviceNumber ← 0 until DevicesCount) devices(deviceNumber).waitConfirmation()
+ }
+
+ for (deviceNumber ← 0 until DevicesCount) devices(deviceNumber).disconnect()
+
+ log.info(s"Messages sent: $expectedMessageCount")
+
+ When("A client application processes messages from the stream")
+ counter ! "reset"
+ val count = Sink.foreach[MessageFromDevice] {
+ m ⇒ counter ! "inc"
+ }
+
+ messages
+ .filter(m ⇒ m.contentAsString contains testRunId)
+ .runWith(count)
+
+      Then("The client application receives all the messages sent")
+ var time = TestTimeout.toMillis.toInt
+ val pause = time / 10
+ var actualMessageCount = readCounter
+ while (time > 0 && actualMessageCount < expectedMessageCount) {
+ Thread.sleep(pause)
+ time -= pause
+ actualMessageCount = readCounter
+ log.info(s"Messages received so far: ${actualMessageCount} of ${expectedMessageCount} [Time left ${time / 1000} secs]")
+ }
+
+ hub.close()
+
+ assert(actualMessageCount == expectedMessageCount,
+ s"Expecting ${expectedMessageCount} messages but received ${actualMessageCount}")
+ }
+ }
+}
diff --git a/src/test/scala/it/DeviceIoTMessagesAreDeliveredInOrder.scala b/src/test/scala/it/DeviceIoTMessagesAreDeliveredInOrder.scala
new file mode 100644
index 0000000..5de7edd
--- /dev/null
+++ b/src/test/scala/it/DeviceIoTMessagesAreDeliveredInOrder.scala
@@ -0,0 +1,121 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package it
+
+import java.time.Instant
+
+import akka.actor.Props
+import akka.pattern.ask
+import akka.stream.scaladsl.Sink
+import com.microsoft.azure.iot.iothubreact.MessageFromDevice
+import com.microsoft.azure.iot.iothubreact.ResumeOnError._
+import com.microsoft.azure.iot.iothubreact.scaladsl.IoTHub
+import it.helpers.{Counter, Device}
+import org.scalatest._
+
+import scala.collection.parallel.mutable
+import scala.concurrent.Await
+import scala.concurrent.duration._
+import scala.language.postfixOps
+
+class DeviceIoTMessagesAreDeliveredInOrder extends FeatureSpec with GivenWhenThen {
+
+ info("As a client of Azure IoT hub")
+ info("I want to receive the messages in order")
+ info("So I can process them in order")
+
+ // A label shared by all the messages, to filter out data sent by other tests
+ val testRunId: String = s"[${this.getClass.getName}-" + java.util.UUID.randomUUID().toString + "]"
+
+ val counter = actorSystem.actorOf(Props[Counter], this.getClass.getName + "Counter")
+ counter ! "reset"
+
+ def readCounter: Long = {
+ Await.result(counter.ask("get")(5 seconds), 5 seconds).asInstanceOf[Long]
+ }
+
+ feature("Device IoT messages are delivered in order") {
+
+ // Note: messages are sent in parallel to obtain some level of mix in the
+ // storage, so do not refactor, i.e. don't do one device at a time.
+ scenario("Customer needs to process IoT messages in the right order") {
+
+ // How many seconds we allow the test to wait for messages from the stream
+ val TestTimeout = 120 seconds
+ val DevicesCount = 25
+ val MessagesPerDevice = 100
+ val expectedMessageCount = DevicesCount * MessagesPerDevice
+
+ // Initialize device objects
+ val devices = new collection.mutable.ListMap[Int, Device]()
+ for (deviceNumber ← 0 until DevicesCount) devices(deviceNumber) = new Device("device" + (10000 + deviceNumber))
+
+ // We'll use this as the streaming start date
+ val startTime = Instant.now().minusSeconds(30)
+ log.info(s"Test run: ${testRunId}, Start time: ${startTime}")
+
+ Given("An IoT hub is configured")
+ val hub = IoTHub()
+ val messages = hub.source(startTime, false)
+
+ And(s"${DevicesCount} devices have sent ${MessagesPerDevice} messages each")
+ for (msgNumber ← 1 to MessagesPerDevice) {
+ for (deviceNumber ← 0 until DevicesCount) {
+ devices(deviceNumber).sendMessage(testRunId, msgNumber)
+ // temporary workaround for issue 995
+ if (msgNumber == 1) devices(deviceNumber).waitConfirmation()
+ }
+
+ for (deviceNumber ← 0 until DevicesCount) devices(deviceNumber).waitConfirmation()
+ }
+ for (deviceNumber ← 0 until DevicesCount) devices(deviceNumber).disconnect()
+ log.info(s"Messages sent: $expectedMessageCount")
+
+ When("A client application processes messages from the stream")
+
+      Then("The client receives all the messages ordered within each device")
+ counter ! "reset"
+ val cursors = new mutable.ParHashMap[String, Long]
+ val verifier = Sink.foreach[MessageFromDevice] {
+ m ⇒ {
+ counter ! "inc"
+ log.debug(s"device: ${m.deviceId}, seq: ${m.sequenceNumber} ")
+
+ if (!cursors.contains(m.deviceId)) {
+ cursors.put(m.deviceId, m.sequenceNumber)
+ }
+ if (cursors(m.deviceId) > m.sequenceNumber) {
+ fail(s"Message out of order. " +
+ s"Device ${m.deviceId}, message ${m.sequenceNumber} arrived " +
+ s"after message ${cursors(m.deviceId)}")
+ }
+ cursors.put(m.deviceId, m.sequenceNumber)
+ }
+ }
+
+ messages
+ .filter(m ⇒ m.contentAsString contains (testRunId))
+ .runWith(verifier)
+
+ // Wait till all messages have been verified
+ var time = TestTimeout.toMillis.toInt
+ val pause = time / 12
+ var actualMessageCount = readCounter
+ while (time > 0 && actualMessageCount < expectedMessageCount) {
+ Thread.sleep(pause)
+ time -= pause
+ actualMessageCount = readCounter
+ log.info(s"Messages received so far: ${actualMessageCount} of ${expectedMessageCount} [Time left ${time / 1000} secs]")
+ }
+
+ log.info("Stopping stream")
+ hub.close()
+
+ log.info(s"actual messages ${actualMessageCount}")
+
+ assert(
+ actualMessageCount == expectedMessageCount,
+ s"Expecting ${expectedMessageCount} messages but received ${actualMessageCount}")
+ }
+ }
+}
diff --git a/src/test/scala/it/IoTHubReactHasAnAwesomeAPI.scala b/src/test/scala/it/IoTHubReactHasAnAwesomeAPI.scala
new file mode 100644
index 0000000..57b70d7
--- /dev/null
+++ b/src/test/scala/it/IoTHubReactHasAnAwesomeAPI.scala
@@ -0,0 +1,38 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package it
+
+import java.time.Instant
+
+import akka.NotUsed
+import akka.stream.scaladsl.{Sink, Source}
+import com.microsoft.azure.iot.iothubreact.MessageFromDevice
+import com.microsoft.azure.iot.iothubreact.scaladsl.{IoTHub, IoTHubPartition}
+import org.scalatest._
+
+class IoTHubReactHasAnAwesomeAPI extends FeatureSpec with GivenWhenThen {
+
+ info("As a client of Azure IoT hub")
+ info("I want to be able to receive device messages as a stream")
+ info("So I can process them asynchronously and at scale")
+
+ feature("IoT Hub React has an awesome API") {
+
+ scenario("Developer wants to retrieve IoT messages") {
+
+ Given("An IoT hub is configured")
+ val hub = IoTHub()
+ val hubPartition = IoTHubPartition(1)
+
+ When("A developer wants to fetch messages from Azure IoT hub")
+ val messagesFromAllPartitions: Source[MessageFromDevice, NotUsed] = hub.source(false)
+ val messagesFromNowOn: Source[MessageFromDevice, NotUsed] = hub.source(Instant.now(), false)
+
+ Then("The messages are presented as a stream")
+ messagesFromAllPartitions.to(Sink.ignore)
+ messagesFromNowOn.to(Sink.ignore)
+
+ hub.close()
+ }
+ }
+}
diff --git a/src/test/scala/it/TODO.txt b/src/test/scala/it/TODO.txt
new file mode 100644
index 0000000..4e4c939
--- /dev/null
+++ b/src/test/scala/it/TODO.txt
@@ -0,0 +1,5 @@
+Tests to add:
+
+* (UT) Applications using IoTHubReact only to receive, don't need to set `accessHostName`
+* (UT) Applications using IoTHubReact only to send, don't need to set `hubName`, `hubEndpoint`, `hubPartitions`
+* (IT) Test sink end to end
diff --git a/src/test/scala/it/TestConnectivity.scala b/src/test/scala/it/TestConnectivity.scala
new file mode 100644
index 0000000..0c71e7b
--- /dev/null
+++ b/src/test/scala/it/TestConnectivity.scala
@@ -0,0 +1,100 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package it
+
+import java.time.Instant
+
+import com.microsoft.azure.eventhubs.{EventHubClient, PartitionReceiver}
+import com.microsoft.azure.iot.iothubreact.ResumeOnError._
+import com.microsoft.azure.servicebus.ConnectionStringBuilder
+import it.helpers.{Configuration, Device}
+import org.scalatest._
+
+import scala.collection.JavaConverters._
+import scala.language.{implicitConversions, postfixOps}
+
+class TestConnectivity extends FeatureSpec with GivenWhenThen {
+
+ info("As a test runner")
+  info("I want to connect to EventHub")
+ info("So I can run the test suite")
+
+ // A label shared by all the messages, to filter out data sent by other tests
+ val testRunId = s"[${this.getClass.getName}-" + java.util.UUID.randomUUID().toString + "]"
+ val startTime = Instant.now().minusSeconds(60)
+
+ feature("The test suite can connect to IoT Hub") {
+
+ scenario("The test uses the configured credentials") {
+
+ // Enough devices to hit the first partitions, so that the test ends quickly
+ val DevicesCount = 10
+
+ // Create devices
+ val devices = new collection.mutable.ListMap[Int, Device]()
+ for (deviceNumber ← 0 until DevicesCount) devices(deviceNumber) = new Device("device" + (10000 + deviceNumber))
+
+ // Send a message from each device
+ for (deviceNumber ← 0 until DevicesCount) {
+ devices(deviceNumber).sendMessage(testRunId, 0)
+ // Workaround for issue 995
+ devices(deviceNumber).waitConfirmation()
+ }
+
+ // Wait and disconnect
+ for (deviceNumber ← 0 until DevicesCount) {
+ devices(deviceNumber).waitConfirmation()
+ devices(deviceNumber).disconnect()
+ }
+
+ val connString = new ConnectionStringBuilder(
+ Configuration.iotHubNamespace,
+ Configuration.iotHubName,
+ Configuration.accessPolicy,
+ Configuration.accessKey).toString
+
+ log.info(s"Connecting to IoT Hub")
+ val client = EventHubClient.createFromConnectionStringSync(connString)
+
+ var found = false
+ var attempts = 0
+ var p = 0
+
+    // Check that at least one message arrived at IoT Hub
+ while (!found && p < Configuration.iotHubPartitions) {
+
+ log.info(s"Checking partition ${p}")
+ val receiver: PartitionReceiver = client.createReceiverSync(Configuration.receiverConsumerGroup, p.toString, startTime)
+
+ log.info("Receiver getEpoch(): " + receiver.getEpoch)
+ log.info("Receiver getPartitionId(): " + receiver.getPartitionId)
+ log.info("Receiver getPrefetchCount(): " + receiver.getPrefetchCount)
+ log.info("Receiver getReceiveTimeout(): " + receiver.getReceiveTimeout)
+
+ attempts = 0
+ while (!found && attempts < 100) {
+ attempts += 1
+
+ log.info(s"Receiving batch ${attempts}")
+ val records = receiver.receiveSync(999)
+ if (records == null) {
+ attempts = Int.MaxValue
+ log.info("This partition is empty")
+ } else {
+ val messages = records.asScala
+ log.info(s"Messages retrieved ${messages.size}")
+
+ val matching = messages.filter(e ⇒ new String(e.getBody) contains testRunId)
+ log.info(s"Matching messages ${matching.size}")
+
+ found = (matching.size > 0)
+ }
+ }
+
+ p += 1
+ }
+
+ assert(found, s"Expecting to find at least one of the messages sent")
+ }
+ }
+}
diff --git a/src/test/scala/it/helpers/Configuration.scala b/src/test/scala/it/helpers/Configuration.scala
new file mode 100644
index 0000000..9b9154e
--- /dev/null
+++ b/src/test/scala/it/helpers/Configuration.scala
@@ -0,0 +1,52 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package it.helpers
+
+import java.nio.file.{Files, Paths}
+
+import com.microsoft.azure.eventhubs.EventHubClient
+import com.typesafe.config.{Config, ConfigFactory}
+import org.json4s._
+import org.json4s.jackson.JsonMethods._
+import scala.reflect.io.File
+
+/* Test configuration settings */
+object Configuration {
+
+ // JSON parser setup, brings in default date formats etc.
+ implicit val formats = DefaultFormats
+
+ private[this] val confConnPath = "iothub-react.connection."
+ private[this] val confStreamingPath = "iothub-react.streaming."
+
+ private[this] val conf: Config = ConfigFactory.load()
+
+ // Read-only settings
+ val iotHubNamespace : String = conf.getString(confConnPath + "namespace")
+ val iotHubName : String = conf.getString(confConnPath + "name")
+ val iotHubPartitions: Int = conf.getInt(confConnPath + "partitions")
+ val accessPolicy : String = conf.getString(confConnPath + "accessPolicy")
+ val accessKey : String = conf.getString(confConnPath + "accessKey")
+
+ // Tests can override these
+ var receiverConsumerGroup: String = EventHubClient.DEFAULT_CONSUMER_GROUP_NAME
+ var receiverTimeout : Long = conf.getDuration(confStreamingPath + "receiverTimeout").toMillis
+ var receiverBatchSize : Int = conf.getInt(confStreamingPath + "receiverBatchSize")
+
+ // Read devices configuration from JSON file
+ private[this] lazy val devicesJsonFile = conf.getString(confConnPath + "devices")
+ private[this] lazy val devicesJson: String = File(devicesJsonFile).slurp()
+ private[this] lazy val devices : Array[DeviceCredentials] = parse(devicesJson).extract[Array[DeviceCredentials]]
+
+ def deviceCredentials(id: String): DeviceCredentials = {
+ val deviceData: Option[DeviceCredentials] = devices.find(x ⇒ x.deviceId == id)
+    if (deviceData.isEmpty) {
+ throw new RuntimeException(s"Device '${id}' credentials not found")
+ }
+ deviceData.get
+ }
+
+ if (!Files.exists(Paths.get(devicesJsonFile))) {
+ throw new RuntimeException("Devices credentials not found")
+ }
+}
diff --git a/src/test/scala/it/helpers/Counter.scala b/src/test/scala/it/helpers/Counter.scala
new file mode 100644
index 0000000..1a16a22
--- /dev/null
+++ b/src/test/scala/it/helpers/Counter.scala
@@ -0,0 +1,40 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+package it.helpers
+
+import java.util.concurrent.Executors
+
+import akka.actor.{Actor, Stash}
+
+import scala.concurrent.ExecutionContext
+
+/* Thread safe counter */
+class Counter extends Actor with Stash {
+
+ implicit val executionContext = ExecutionContext
+ .fromExecutorService(Executors.newFixedThreadPool(sys.runtime.availableProcessors))
+
+ private[this] var count: Long = 0
+
+ override def receive: Receive = ready
+
+ def ready: Receive = {
+ case "reset" ⇒ {
+ context.become(busy)
+ count = 0
+ context.become(ready)
+ unstashAll()
+ }
+ case "inc" ⇒ {
+ context.become(busy)
+ count += 1
+ context.become(ready)
+ unstashAll()
+ }
+ case "get" ⇒ sender() ! count
+ }
+
+ def busy: Receive = {
+ case _ ⇒ stash()
+ }
+}
diff --git a/src/test/scala/com/microsoft/azure/iot/iothubreact/test/helpers/Device.scala b/src/test/scala/it/helpers/Device.scala
similarity index 50%
rename from src/test/scala/com/microsoft/azure/iot/iothubreact/test/helpers/Device.scala
rename to src/test/scala/it/helpers/Device.scala
index 9d4b57a..5e9c686 100644
--- a/src/test/scala/com/microsoft/azure/iot/iothubreact/test/helpers/Device.scala
+++ b/src/test/scala/it/helpers/Device.scala
@@ -1,16 +1,24 @@
// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.azure.iot.iothubreact.test.helpers
+package it.helpers
import com.microsoft.azure.iothub._
/* Test helper to send messages to the hub */
class Device(deviceId: String) extends Logger {
+ private var ready = true
+ private val waitOnSend = 20000
+ private val waitUnit = 50
+
private[this] class EventCallback extends IotHubEventCallback {
override def execute(status: IotHubStatusCode, context: scala.Any): Unit = {
+ ready = true
val i = context.asInstanceOf[Int]
- log.debug(s"Message ${i} status ${status.name()}")
+ log.debug(s"${deviceId}: Message ${i} status ${status.name()}")
+
+ // Sleep to avoid being throttled
+ Thread.sleep(50)
}
}
@@ -22,16 +30,43 @@ class Device(deviceId: String) extends Logger {
Configuration.iotHubName, credentials.deviceId, credentials.primaryKey)
// Prepare client to send messages
- private[this] lazy val client = new DeviceClient(connString, IotHubClientProtocol.AMQPS)
-
- def disconnect(): Unit = {
- client.close()
+ private[this] lazy val client = {
+ log.info(s"Opening connection for device '${deviceId}'")
+ new DeviceClient(connString, IotHubClientProtocol.AMQPS)
}
def sendMessage(text: String, sequenceNumber: Int): Unit = {
+
+ if (!ready) {
+ waitConfirmation()
+ if (!ready) throw new RuntimeException(s"Device '${deviceId}', the client is busy")
+ }
+
+ ready = false
+
+ // Open internally checks if it is already connected
client.open()
+
log.debug(s"Device '$deviceId' sending '$text'")
val message = new Message(text)
client.sendEventAsync(message, new EventCallback(), sequenceNumber)
}
+
+ def waitConfirmation(): Unit = {
+
+ log.debug(s"Device '${deviceId}' waiting for confirmation...")
+
+ var wait = waitOnSend
+ if (!ready) while (wait > 0 && !ready) {
+ Thread.sleep(waitUnit)
+ wait -= waitUnit
+ }
+
+ if (!ready) log.debug(s"Device '${deviceId}', confirmation not received")
+ }
+
+ def disconnect(): Unit = {
+ client.close()
+ log.debug(s"Device '$deviceId' disconnected")
+ }
}
diff --git a/src/test/scala/com/microsoft/azure/iot/iothubreact/test/helpers/DeviceConnectionString.scala b/src/test/scala/it/helpers/DeviceConnectionString.scala
similarity index 90%
rename from src/test/scala/com/microsoft/azure/iot/iothubreact/test/helpers/DeviceConnectionString.scala
rename to src/test/scala/it/helpers/DeviceConnectionString.scala
index 64de0cf..be803f9 100644
--- a/src/test/scala/com/microsoft/azure/iot/iothubreact/test/helpers/DeviceConnectionString.scala
+++ b/src/test/scala/it/helpers/DeviceConnectionString.scala
@@ -1,6 +1,6 @@
// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.azure.iot.iothubreact.test.helpers
+package it.helpers
/* Format a connection string accordingly to SDK */
object DeviceConnectionString {
diff --git a/src/test/scala/com/microsoft/azure/iot/iothubreact/test/helpers/DeviceCredentials.scala b/src/test/scala/it/helpers/DeviceCredentials.scala
similarity index 82%
rename from src/test/scala/com/microsoft/azure/iot/iothubreact/test/helpers/DeviceCredentials.scala
rename to src/test/scala/it/helpers/DeviceCredentials.scala
index e8e7ca4..ce898a5 100644
--- a/src/test/scala/com/microsoft/azure/iot/iothubreact/test/helpers/DeviceCredentials.scala
+++ b/src/test/scala/it/helpers/DeviceCredentials.scala
@@ -1,6 +1,6 @@
// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.azure.iot.iothubreact.test.helpers
+package it.helpers
/** Model used to deserialize the device credentials
*
diff --git a/src/test/scala/com/microsoft/azure/iot/iothubreact/test/helpers/Logger.scala b/src/test/scala/it/helpers/Logger.scala
similarity index 75%
rename from src/test/scala/com/microsoft/azure/iot/iothubreact/test/helpers/Logger.scala
rename to src/test/scala/it/helpers/Logger.scala
index 6ba8159..5bc73aa 100644
--- a/src/test/scala/com/microsoft/azure/iot/iothubreact/test/helpers/Logger.scala
+++ b/src/test/scala/it/helpers/Logger.scala
@@ -1,10 +1,14 @@
// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.azure.iot.iothubreact.test.helpers
+package it.helpers
import akka.actor.ActorSystem
import akka.event.{LogSource, Logging}
+object Logger {
+ val actorSystem = ActorSystem("IoTHubReactTests")
+}
+
/** Common logger via Akka
*
* @see http://doc.akka.io/docs/akka/2.4.10/scala/logging.html
@@ -17,5 +21,5 @@ trait Logger {
override def getClazz(o: AnyRef): Class[_] = o.getClass
}
- val log = Logging(ActorSystem("IoTHubReactTests"), this)
+ val log = Logging(Logger.actorSystem, this)
}
diff --git a/tools/devices-simulator/LICENSE b/tools/devices-simulator/LICENSE
new file mode 100644
index 0000000..8cc52aa
--- /dev/null
+++ b/tools/devices-simulator/LICENSE
@@ -0,0 +1,20 @@
+Copyright (c) Microsoft Corporation
+All rights reserved.
+MIT License
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/tools/devices-simulator/README.md b/tools/devices-simulator/README.md
index cec60a8..8df8f44 100644
--- a/tools/devices-simulator/README.md
+++ b/tools/devices-simulator/README.md
@@ -11,7 +11,7 @@ selecting the "F1 Free" scale tier.
Once you have an IoT hub ready, you should take note of:
* the **connection string** from the **Shared access policies** panel, for
- the **iothubowner** policy.
+ the **device** policy.
## Create the devices
@@ -31,7 +31,7 @@ Once the IoT hub explorer is installed, proceed to create the devices:
* **Login, using the connection string obtained earlier:**
```bash
-CONNSTRING="... iothubowner connection string ..."
+CONNSTRING="... device connection string ..."
iothub-explorer login '$CONNSTRING'
```
@@ -49,7 +49,7 @@ The following command creates a `credentials.js` file with the settings required
From the terminal, 'cd' into the same folder of this README document, and execute:
```bash
-export CONNSTRING="... iothubowner connection string ..."
+export CONNSTRING="... device connection string ..."
./download_credentials.sh
unset CONNSTRING
```
diff --git a/tools/devices-simulator/abstract_simulator.js b/tools/devices-simulator/abstract_simulator.js
index b03f759..819604e 100644
--- a/tools/devices-simulator/abstract_simulator.js
+++ b/tools/devices-simulator/abstract_simulator.js
@@ -21,7 +21,7 @@ function AbstractSimulator(hubName, deviceId, accessKey, protocol, frequency) {
this.frequency = frequency;
this.connectionStatus = "disconnected";
this.clock = null;
- this.model = "";
+ this.messageType = "";
}
/**
@@ -86,9 +86,10 @@ AbstractSimulator.prototype.startSending = function () {
}
var message = new Message(self.generateData());
- if (self.model !== "") {
- message.properties.add("model", self.model);
- console.log("[" + self.name + "] Sending " + self.model + ": " + message.getData());
+ message.properties.add("$$contentType", "json");
+ if (self.messageType !== "") {
+ message.properties.add("$$messageType", self.messageType);
+ console.log("[" + self.name + "] Sending " + self.messageType + ": " + message.getData());
} else {
console.log("[" + self.name + "] Sending message: " + message.getData());
}
diff --git a/tools/devices-simulator/humidity_simulator.js b/tools/devices-simulator/humidity_simulator.js
index 1628bc6..1f0893e 100644
--- a/tools/devices-simulator/humidity_simulator.js
+++ b/tools/devices-simulator/humidity_simulator.js
@@ -7,7 +7,7 @@ var AbstractSimulator = require("./abstract_simulator.js");
// Inheritance
var HumiditySimulator = function () {
AbstractSimulator.apply(this, arguments);
- this.model = "humidity";
+ this.messageType = "humidity";
// (70-30) .. (70+30) => 40 .. 100
this.mid = 70;
diff --git a/tools/devices-simulator/package.json b/tools/devices-simulator/package.json
index dd81918..b946d81 100644
--- a/tools/devices-simulator/package.json
+++ b/tools/devices-simulator/package.json
@@ -6,7 +6,7 @@
"author": "Microsoft Corporation",
"license": "MIT",
"dependencies": {
- "azure-iot-device": "1.0.15",
+ "azure-iot-device": "1.0.16",
"azure-iot-device-amqp": "1.0.16",
"azure-iot-device-http": "1.0.16"
},
diff --git a/tools/devices-simulator/temperature_simulator.js b/tools/devices-simulator/temperature_simulator.js
index 3d29294..54c3d59 100644
--- a/tools/devices-simulator/temperature_simulator.js
+++ b/tools/devices-simulator/temperature_simulator.js
@@ -7,7 +7,7 @@ var AbstractSimulator = require("./abstract_simulator.js");
// Inheritance
var TemperatureSimulator = function () {
AbstractSimulator.apply(this, arguments);
- this.model = "temperature";
+ this.messageType = "temperature";
// (15-25) .. (15+25) => -10 .. 40
this.mid = 15;