diff --git a/samples/README.md b/samples/README.md index 6ae3c05..9f5220c 100644 --- a/samples/README.md +++ b/samples/README.md @@ -51,6 +51,8 @@ You can find all Kafka related configuration on the `function.json.` In the case _function.json_ +#### For Confluent + ```json { "scriptFile" : "../kafka-function-1.0-SNAPSHOT.jar", @@ -72,12 +74,37 @@ _function.json_ } ``` +#### For EventHub + +```json +{ + "scriptFile" : "../kafka-function-1.0-SNAPSHOT.jar", + "entryPoint" : "com.contoso.kafka.TriggerFunction.runMany", + "bindings" : [ { + "type" : "kafkaTrigger", + "direction" : "in", + "name" : "kafkaEvents", + "password" : "%EventHubConnectionString%", + "protocol" : "SASLSSL", + "dataType" : "string", + "topic" : "message", + "authenticationMode" : "PLAIN", + "consumerGroup" : "$Default", + "cardinality" : "MANY", + "username" : "$ConnectionString", + "brokerList" : "%BrokerList%" + } ] +} +``` +**NOTE** For EventHub, username should be set to "$ConnectionString" only. The password should be the actual connection string value that could be set in local.settings.json or appsettings (Please see [local-settings](#localsettingsjson) section for more details). + ### local.settings.json It is the configuration of a local function runtime. If you deploy the target application on Azure with a `local.settings.json,` you will require the same settings on the Function App [App settings](https://docs.microsoft.com/en-us/azure/azure-functions/functions-how-to-use-azure-function-app-settings#settings). -For more details, refer to [Local settings file](https://docs.microsoft.com/en-us/azure/azure-functions/functions-run-local?tabs=macos%2Ccsharp%2Cbash#local-settings-file). +**NOTE** All the passwords and connection strings settings are recommended to be put in appsettings. For more details, refer to [Local settings file](https://docs.microsoft.com/en-us/azure/azure-functions/functions-run-local?tabs=v4%2Cmacos%2Ccsharp%2Cportal%2Cbash#local-settings). 
+#### For Confluent ```json { "IsEncrypted": false, @@ -85,12 +112,26 @@ For more details, refer to [Local settings file](https://docs.microsoft.com/en-u "BrokerList": "{YOUR_CONFLUENT_CLOUD_BROKER}", "ConfluentCloudUserName": "{YOUR_CONFLUENT_CLOUD_USERNAME}", "ConfluentCloudPassword": "{YOUR_CONFLUENT_CLOUD_PASSWORD}", - "FUNCTIONS_WORKER_RUNTIME": "python", - "AzureWebJobsStorage": "" + "FUNCTIONS_WORKER_RUNTIME": "", + "AzureWebJobsStorage": "", + "topic": "{YOUR_KAFKA_TOPIC_NAME}" } } ``` +#### For EventHub +```json +{ + "IsEncrypted": false, + "Values": { + "AzureWebJobsStorage": "UseDevelopmentStorage=true", + "FUNCTIONS_WORKER_RUNTIME": "", + "BrokerList": ".servicebus.windows.net:9093", + "EventHubConnectionString": "", + "topic": "{YOUR_KAFKA_TOPIC_NAME}" + } +} +``` ### Extension Bundle and install Kafka extension Currently, in Azure Functions - most triggers and bindings are ordinarily obtained using the extension bundle. However, currently, the Kafka extension is not part of the extension bundle (will be added in the future). Meanwhile, you will have to install the Kafka extension manually. @@ -182,3 +223,19 @@ The sample provides a devcontainer profile. Open the folder in VsCode and perfor } } ``` + +### Headers +Headers are supported for both Kafka Trigger and Kafka Output binding. You can find the samples for headers in this folder with name `KafkaTriggerWithHeaders`, `KafkaTriggerManyWithHeaders` for Trigger functions and `KafakOutputWithHeaders`, `KafkaOutputManyWithHeaders` for output binding functions. +#### Output Binding Functions +`KafkaOutputWithHeaders` is a sample for single event type while `KafkaOutputManyWithHeaders` is for batch events. + +To run `KafkaOutputWithHeaders` function, send a http GET request with message at url `http://localhost:7071/api/KafkaOutputWithHeaders?message=`. It will create a new Kafka Event with payload as your_message and headers as `{ Key: 'test', Value: ''}`. 
+ +Similarly, to run `KafkaOutputManyWithHeaders` function, send a http GET request at url `http://localhost:7071/api/KafkaOutputManyWithHeaders`. It would create two messages with headers on given topic. + +#### Trigger Functions +`KafkaTriggerWithHeaders` is a sample for single event type while `KafkaTriggerManyWithHeaders` is for batch events. + +`KafkaTriggerWithHeaders` will be triggered whenever there is a Kafka Event. It prints the message and the corresponding headers for that message. + +Similarly, `KafkaTriggerManyWithHeaders` is a trigger function which processes batch of Kafka events. For all the events in the batch, it prints the message and corresponding headers. \ No newline at end of file diff --git a/samples/dotnet-isolated/confluent/.gitignore b/samples/dotnet-isolated/confluent/.gitignore new file mode 100644 index 0000000..ff5b00c --- /dev/null +++ b/samples/dotnet-isolated/confluent/.gitignore @@ -0,0 +1,264 @@ +## Ignore Visual Studio temporary files, build results, and +## files generated by popular Visual Studio add-ons. 
+ +# Azure Functions localsettings file +local.settings.json + +# User-specific files +*.suo +*.user +*.userosscache +*.sln.docstates + +# User-specific files (MonoDevelop/Xamarin Studio) +*.userprefs + +# Build results +[Dd]ebug/ +[Dd]ebugPublic/ +[Rr]elease/ +[Rr]eleases/ +x64/ +x86/ +bld/ +[Bb]in/ +[Oo]bj/ +[Ll]og/ + +# Visual Studio 2015 cache/options directory +.vs/ +# Uncomment if you have tasks that create the project's static files in wwwroot +#wwwroot/ + +# MSTest test Results +[Tt]est[Rr]esult*/ +[Bb]uild[Ll]og.* + +# NUNIT +*.VisualState.xml +TestResult.xml + +# Build Results of an ATL Project +[Dd]ebugPS/ +[Rr]eleasePS/ +dlldata.c + +# DNX +project.lock.json +project.fragment.lock.json +artifacts/ + +*_i.c +*_p.c +*_i.h +*.ilk +*.meta +*.obj +*.pch +*.pdb +*.pgc +*.pgd +*.rsp +*.sbr +*.tlb +*.tli +*.tlh +*.tmp +*.tmp_proj +*.log +*.vspscc +*.vssscc +.builds +*.pidb +*.svclog +*.scc + +# Chutzpah Test files +_Chutzpah* + +# Visual C++ cache files +ipch/ +*.aps +*.ncb +*.opendb +*.opensdf +*.sdf +*.cachefile +*.VC.db +*.VC.VC.opendb + +# Visual Studio profiler +*.psess +*.vsp +*.vspx +*.sap + +# TFS 2012 Local Workspace +$tf/ + +# Guidance Automation Toolkit +*.gpState + +# ReSharper is a .NET coding add-in +_ReSharper*/ +*.[Rr]e[Ss]harper +*.DotSettings.user + +# JustCode is a .NET coding add-in +.JustCode + +# TeamCity is a build add-in +_TeamCity* + +# DotCover is a Code Coverage Tool +*.dotCover + +# NCrunch +_NCrunch_* +.*crunch*.local.xml +nCrunchTemp_* + +# MightyMoose +*.mm.* +AutoTest.Net/ + +# Web workbench (sass) +.sass-cache/ + +# Installshield output folder +[Ee]xpress/ + +# DocProject is a documentation generator add-in +DocProject/buildhelp/ +DocProject/Help/*.HxT +DocProject/Help/*.HxC +DocProject/Help/*.hhc +DocProject/Help/*.hhk +DocProject/Help/*.hhp +DocProject/Help/Html2 +DocProject/Help/html + +# Click-Once directory +publish/ + +# Publish Web Output +*.[Pp]ublish.xml +*.azurePubxml +# TODO: Comment the next line if you want to 
checkin your web deploy settings +# but database connection strings (with potential passwords) will be unencrypted +#*.pubxml +*.publishproj + +# Microsoft Azure Web App publish settings. Comment the next line if you want to +# checkin your Azure Web App publish settings, but sensitive information contained +# in these scripts will be unencrypted +PublishScripts/ + +# NuGet Packages +*.nupkg +# The packages folder can be ignored because of Package Restore +**/packages/* +# except build/, which is used as an MSBuild target. +!**/packages/build/ +# Uncomment if necessary however generally it will be regenerated when needed +#!**/packages/repositories.config +# NuGet v3's project.json files produces more ignoreable files +*.nuget.props +*.nuget.targets + +# Microsoft Azure Build Output +csx/ +*.build.csdef + +# Microsoft Azure Emulator +ecf/ +rcf/ + +# Windows Store app package directories and files +AppPackages/ +BundleArtifacts/ +Package.StoreAssociation.xml +_pkginfo.txt + +# Visual Studio cache files +# files ending in .cache can be ignored +*.[Cc]ache +# but keep track of directories ending in .cache +!*.[Cc]ache/ + +# Others +ClientBin/ +~$* +*~ +*.dbmdl +*.dbproj.schemaview +*.jfm +*.pfx +*.publishsettings +node_modules/ +orleans.codegen.cs + +# Since there are multiple workflows, uncomment next line to ignore bower_components +# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) +#bower_components/ + +# RIA/Silverlight projects +Generated_Code/ + +# Backup & report files from converting an old project file +# to a newer Visual Studio version. 
Backup files are not needed, +# because we have git ;-) +_UpgradeReport_Files/ +Backup*/ +UpgradeLog*.XML +UpgradeLog*.htm + +# SQL Server files +*.mdf +*.ldf + +# Business Intelligence projects +*.rdl.data +*.bim.layout +*.bim_*.settings + +# Microsoft Fakes +FakesAssemblies/ + +# GhostDoc plugin setting file +*.GhostDoc.xml + +# Node.js Tools for Visual Studio +.ntvs_analysis.dat + +# Visual Studio 6 build log +*.plg + +# Visual Studio 6 workspace options file +*.opt + +# Visual Studio LightSwitch build output +**/*.HTMLClient/GeneratedArtifacts +**/*.DesktopClient/GeneratedArtifacts +**/*.DesktopClient/ModelManifest.xml +**/*.Server/GeneratedArtifacts +**/*.Server/ModelManifest.xml +_Pvt_Extensions + +# Paket dependency manager +.paket/paket.exe +paket-files/ + +# FAKE - F# Make +.fake/ + +# JetBrains Rider +.idea/ +*.sln.iml + +# CodeRush +.cr/ + +# Python Tools for Visual Studio (PTVS) +__pycache__/ +*.pyc \ No newline at end of file diff --git a/samples/dotnet-isolated/confluent/Confluent.csproj b/samples/dotnet-isolated/confluent/Confluent.csproj new file mode 100644 index 0000000..e9a3616 --- /dev/null +++ b/samples/dotnet-isolated/confluent/Confluent.csproj @@ -0,0 +1,24 @@ + + + net6.0 + v4 + Exe + + + + + + + + + + + + PreserveNewest + + + PreserveNewest + Never + + + diff --git a/samples/dotnet-isolated/confluent/KafkaOutput.cs b/samples/dotnet-isolated/confluent/KafkaOutput.cs new file mode 100644 index 0000000..4aed361 --- /dev/null +++ b/samples/dotnet-isolated/confluent/KafkaOutput.cs @@ -0,0 +1,47 @@ +using System; +using Microsoft.Azure.Functions.Worker; +using Microsoft.Extensions.Logging; +using Microsoft.Azure.Functions.Worker.Http; +using System.Net; + +namespace Confluent +{ + public class KafkaOutput + { + [Function("KafkaOutput")] + + public static MultipleOutputType Output( + [HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = null)] HttpRequestData req, + FunctionContext executionContext) + { + var log = 
executionContext.GetLogger("HttpFunction"); + log.LogInformation("C# HTTP trigger function processed a request."); + + string message = req.FunctionContext + .BindingContext + .BindingData["message"] + .ToString(); + + var response = req.CreateResponse(HttpStatusCode.OK); + return new MultipleOutputType() + { + Kevent = message, + HttpResponse = response + }; + } + } + + public class MultipleOutputType + { + [KafkaOutput("BrokerList", + "topic", + Username = "ConfluentCloudUserName", + Password = "ConfluentCloudPassword", + Protocol = BrokerProtocol.SaslSsl, + AuthenticationMode = BrokerAuthenticationMode.Plain + )] + public string Kevent { get; set; } + + public HttpResponseData HttpResponse { get; set; } + } +} diff --git a/samples/dotnet-isolated/confluent/KafkaOutputMany.cs b/samples/dotnet-isolated/confluent/KafkaOutputMany.cs new file mode 100644 index 0000000..584f45c --- /dev/null +++ b/samples/dotnet-isolated/confluent/KafkaOutputMany.cs @@ -0,0 +1,46 @@ +using System; +using Microsoft.Azure.Functions.Worker; +using Microsoft.Extensions.Logging; +using Microsoft.Azure.Functions.Worker.Http; +using System.Net; + +namespace Confluent +{ + public class KafkaOutputMany + { + [Function("KafkaOutputMany")] + + public static MultipleOutputTypeForBatch Output( + [HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = null)] HttpRequestData req, + FunctionContext executionContext) + { + var log = executionContext.GetLogger("HttpFunction"); + log.LogInformation("C# HTTP trigger function processed a request."); + var response = req.CreateResponse(HttpStatusCode.OK); + + string[] messages = new string[2]; + messages[0] = "one"; + messages[1] = "two"; + + return new MultipleOutputTypeForBatch() + { + Kevents = messages, + HttpResponse = response + }; + } + } + + public class MultipleOutputTypeForBatch + { + [KafkaOutput("BrokerList", + "topic", + Username = "ConfluentCloudUserName", + Password = "ConfluentCloudPassword", + Protocol = BrokerProtocol.SaslSsl, + 
AuthenticationMode = BrokerAuthenticationMode.Plain + )] + public string[] Kevents { get; set; } + + public HttpResponseData HttpResponse { get; set; } + } +} diff --git a/samples/dotnet-isolated/confluent/KafkaTrigger.cs b/samples/dotnet-isolated/confluent/KafkaTrigger.cs new file mode 100644 index 0000000..5742298 --- /dev/null +++ b/samples/dotnet-isolated/confluent/KafkaTrigger.cs @@ -0,0 +1,26 @@ +using System; +using Microsoft.Azure.Functions.Worker; +using Microsoft.Extensions.Logging; +using Microsoft.Azure.Functions.Worker.Http; +using Newtonsoft.Json.Linq; + + +namespace Confluent +{ + public class KafkaTrigger + { + [Function("KafkaTrigger")] + public static void Run( + [KafkaTrigger("BrokerList", + "topic", + Username = "ConfluentCloudUserName", + Password = "ConfluentCloudPassword", + Protocol = BrokerProtocol.SaslSsl, + AuthenticationMode = BrokerAuthenticationMode.Plain, + ConsumerGroup = "$Default")] string eventData, FunctionContext context) + { + var logger = context.GetLogger("KafkaFunction"); + logger.LogInformation($"C# Kafka trigger function processed a message: {JObject.Parse(eventData)["Value"]}"); + } + } +} \ No newline at end of file diff --git a/samples/dotnet-isolated/confluent/KafkaTriggerMany.cs b/samples/dotnet-isolated/confluent/KafkaTriggerMany.cs new file mode 100644 index 0000000..38b519a --- /dev/null +++ b/samples/dotnet-isolated/confluent/KafkaTriggerMany.cs @@ -0,0 +1,30 @@ +using System; +using Microsoft.Azure.Functions.Worker; +using Microsoft.Extensions.Logging; +using Microsoft.Azure.Functions.Worker.Http; +using Newtonsoft.Json.Linq; + + +namespace Confluent +{ + public class KafkaTriggerMany + { + [Function("KafkaTriggerMany")] + public static void Run( + [KafkaTrigger("BrokerList", + "topic", + Username = "ConfluentCloudUserName", + Password = "ConfluentCloudPassword", + Protocol = BrokerProtocol.SaslSsl, + AuthenticationMode = BrokerAuthenticationMode.Plain, + ConsumerGroup = "$Default", + IsBatched = true)] string[] 
events, FunctionContext context) + { + foreach (var kevent in events) + { + var logger = context.GetLogger("KafkaFunction"); + logger.LogInformation($"C# Kafka trigger function processed a message: {JObject.Parse(kevent)["Value"]}"); + } + } + } +} \ No newline at end of file diff --git a/samples/dotnet-isolated/confluent/Program.cs b/samples/dotnet-isolated/confluent/Program.cs new file mode 100644 index 0000000..884bc0a --- /dev/null +++ b/samples/dotnet-isolated/confluent/Program.cs @@ -0,0 +1,19 @@ +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.Hosting; +using Microsoft.Azure.Functions.Worker.Configuration; + +namespace Confluent +{ + public class Program + { + public static void Main() + { + var host = new HostBuilder() + .ConfigureFunctionsWorkerDefaults() + .Build(); + + host.Run(); + } + } +} \ No newline at end of file diff --git a/samples/dotnet-isolated/confluent/host.json b/samples/dotnet-isolated/confluent/host.json new file mode 100644 index 0000000..a5ada92 --- /dev/null +++ b/samples/dotnet-isolated/confluent/host.json @@ -0,0 +1,16 @@ +{ + "version": "2.0", + "logging": { + "applicationInsights": { + "samplingSettings": { + "isEnabled": true, + "excludedTypes": "Request" + } + } + }, + "extensions": { + "kafka": { + "maxBatchSize": 3 + } + } +} \ No newline at end of file diff --git a/samples/dotnet-isolated/confluent/local.settings.json.example b/samples/dotnet-isolated/confluent/local.settings.json.example new file mode 100644 index 0000000..17e427e --- /dev/null +++ b/samples/dotnet-isolated/confluent/local.settings.json.example @@ -0,0 +1,11 @@ +{ + "IsEncrypted": false, + "Values": { + "AzureWebJobsStorage": "UseDevelopmentStorage=true", + "FUNCTIONS_WORKER_RUNTIME": "dotnet-isolated", + "BrokerList": "{YOUR_CONFLUENT_CLOUD_BROKER}", + "ConfluentCloudUserName": "{YOUR_CONFLUENT_CLOUD_USERNAME}", + "ConfluentCloudPassword": "{YOUR_CONFLUENT_CLOUD_PASSWORD}", + "topic": 
"{YOUR_KAFKA_TOPIC_NAME}" + } +} \ No newline at end of file diff --git a/samples/dotnet-isolated/eventhub/.gitignore b/samples/dotnet-isolated/eventhub/.gitignore new file mode 100644 index 0000000..ff5b00c --- /dev/null +++ b/samples/dotnet-isolated/eventhub/.gitignore @@ -0,0 +1,264 @@ +## Ignore Visual Studio temporary files, build results, and +## files generated by popular Visual Studio add-ons. + +# Azure Functions localsettings file +local.settings.json + +# User-specific files +*.suo +*.user +*.userosscache +*.sln.docstates + +# User-specific files (MonoDevelop/Xamarin Studio) +*.userprefs + +# Build results +[Dd]ebug/ +[Dd]ebugPublic/ +[Rr]elease/ +[Rr]eleases/ +x64/ +x86/ +bld/ +[Bb]in/ +[Oo]bj/ +[Ll]og/ + +# Visual Studio 2015 cache/options directory +.vs/ +# Uncomment if you have tasks that create the project's static files in wwwroot +#wwwroot/ + +# MSTest test Results +[Tt]est[Rr]esult*/ +[Bb]uild[Ll]og.* + +# NUNIT +*.VisualState.xml +TestResult.xml + +# Build Results of an ATL Project +[Dd]ebugPS/ +[Rr]eleasePS/ +dlldata.c + +# DNX +project.lock.json +project.fragment.lock.json +artifacts/ + +*_i.c +*_p.c +*_i.h +*.ilk +*.meta +*.obj +*.pch +*.pdb +*.pgc +*.pgd +*.rsp +*.sbr +*.tlb +*.tli +*.tlh +*.tmp +*.tmp_proj +*.log +*.vspscc +*.vssscc +.builds +*.pidb +*.svclog +*.scc + +# Chutzpah Test files +_Chutzpah* + +# Visual C++ cache files +ipch/ +*.aps +*.ncb +*.opendb +*.opensdf +*.sdf +*.cachefile +*.VC.db +*.VC.VC.opendb + +# Visual Studio profiler +*.psess +*.vsp +*.vspx +*.sap + +# TFS 2012 Local Workspace +$tf/ + +# Guidance Automation Toolkit +*.gpState + +# ReSharper is a .NET coding add-in +_ReSharper*/ +*.[Rr]e[Ss]harper +*.DotSettings.user + +# JustCode is a .NET coding add-in +.JustCode + +# TeamCity is a build add-in +_TeamCity* + +# DotCover is a Code Coverage Tool +*.dotCover + +# NCrunch +_NCrunch_* +.*crunch*.local.xml +nCrunchTemp_* + +# MightyMoose +*.mm.* +AutoTest.Net/ + +# Web workbench (sass) +.sass-cache/ + +# Installshield 
output folder +[Ee]xpress/ + +# DocProject is a documentation generator add-in +DocProject/buildhelp/ +DocProject/Help/*.HxT +DocProject/Help/*.HxC +DocProject/Help/*.hhc +DocProject/Help/*.hhk +DocProject/Help/*.hhp +DocProject/Help/Html2 +DocProject/Help/html + +# Click-Once directory +publish/ + +# Publish Web Output +*.[Pp]ublish.xml +*.azurePubxml +# TODO: Comment the next line if you want to checkin your web deploy settings +# but database connection strings (with potential passwords) will be unencrypted +#*.pubxml +*.publishproj + +# Microsoft Azure Web App publish settings. Comment the next line if you want to +# checkin your Azure Web App publish settings, but sensitive information contained +# in these scripts will be unencrypted +PublishScripts/ + +# NuGet Packages +*.nupkg +# The packages folder can be ignored because of Package Restore +**/packages/* +# except build/, which is used as an MSBuild target. +!**/packages/build/ +# Uncomment if necessary however generally it will be regenerated when needed +#!**/packages/repositories.config +# NuGet v3's project.json files produces more ignoreable files +*.nuget.props +*.nuget.targets + +# Microsoft Azure Build Output +csx/ +*.build.csdef + +# Microsoft Azure Emulator +ecf/ +rcf/ + +# Windows Store app package directories and files +AppPackages/ +BundleArtifacts/ +Package.StoreAssociation.xml +_pkginfo.txt + +# Visual Studio cache files +# files ending in .cache can be ignored +*.[Cc]ache +# but keep track of directories ending in .cache +!*.[Cc]ache/ + +# Others +ClientBin/ +~$* +*~ +*.dbmdl +*.dbproj.schemaview +*.jfm +*.pfx +*.publishsettings +node_modules/ +orleans.codegen.cs + +# Since there are multiple workflows, uncomment next line to ignore bower_components +# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) +#bower_components/ + +# RIA/Silverlight projects +Generated_Code/ + +# Backup & report files from converting an old project file +# to a newer Visual Studio version. 
Backup files are not needed, +# because we have git ;-) +_UpgradeReport_Files/ +Backup*/ +UpgradeLog*.XML +UpgradeLog*.htm + +# SQL Server files +*.mdf +*.ldf + +# Business Intelligence projects +*.rdl.data +*.bim.layout +*.bim_*.settings + +# Microsoft Fakes +FakesAssemblies/ + +# GhostDoc plugin setting file +*.GhostDoc.xml + +# Node.js Tools for Visual Studio +.ntvs_analysis.dat + +# Visual Studio 6 build log +*.plg + +# Visual Studio 6 workspace options file +*.opt + +# Visual Studio LightSwitch build output +**/*.HTMLClient/GeneratedArtifacts +**/*.DesktopClient/GeneratedArtifacts +**/*.DesktopClient/ModelManifest.xml +**/*.Server/GeneratedArtifacts +**/*.Server/ModelManifest.xml +_Pvt_Extensions + +# Paket dependency manager +.paket/paket.exe +paket-files/ + +# FAKE - F# Make +.fake/ + +# JetBrains Rider +.idea/ +*.sln.iml + +# CodeRush +.cr/ + +# Python Tools for Visual Studio (PTVS) +__pycache__/ +*.pyc \ No newline at end of file diff --git a/samples/dotnet-isolated/eventhub/Eventhub.csproj b/samples/dotnet-isolated/eventhub/Eventhub.csproj new file mode 100644 index 0000000..e9a3616 --- /dev/null +++ b/samples/dotnet-isolated/eventhub/Eventhub.csproj @@ -0,0 +1,24 @@ + + + net6.0 + v4 + Exe + + + + + + + + + + + + PreserveNewest + + + PreserveNewest + Never + + + diff --git a/samples/dotnet-isolated/eventhub/KafkaOutput.cs b/samples/dotnet-isolated/eventhub/KafkaOutput.cs new file mode 100644 index 0000000..b143e9b --- /dev/null +++ b/samples/dotnet-isolated/eventhub/KafkaOutput.cs @@ -0,0 +1,47 @@ +using System; +using Microsoft.Azure.Functions.Worker; +using Microsoft.Extensions.Logging; +using Microsoft.Azure.Functions.Worker.Http; +using System.Net; + +namespace Eventhub +{ + public class KafkaOutput + { + [Function("KafkaOutput")] + + public static MultipleOutputType Output( + [HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = null)] HttpRequestData req, + FunctionContext executionContext) + { + var log = 
executionContext.GetLogger("HttpFunction"); + log.LogInformation("C# HTTP trigger function processed a request."); + + string message = req.FunctionContext + .BindingContext + .BindingData["message"] + .ToString(); + + var response = req.CreateResponse(HttpStatusCode.OK); + return new MultipleOutputType() + { + Kevent = message, + HttpResponse = response + }; + } + } + + public class MultipleOutputType + { + [KafkaOutput("BrokerList", + "topic", + Username = "$ConnectionString", + Password = "EventHubConnectionString", + Protocol = BrokerProtocol.SaslSsl, + AuthenticationMode = BrokerAuthenticationMode.Plain + )] + public string Kevent { get; set; } + + public HttpResponseData HttpResponse { get; set; } + } +} diff --git a/samples/dotnet-isolated/eventhub/KafkaOutputMany.cs b/samples/dotnet-isolated/eventhub/KafkaOutputMany.cs new file mode 100644 index 0000000..56a1613 --- /dev/null +++ b/samples/dotnet-isolated/eventhub/KafkaOutputMany.cs @@ -0,0 +1,46 @@ +using System; +using Microsoft.Azure.Functions.Worker; +using Microsoft.Extensions.Logging; +using Microsoft.Azure.Functions.Worker.Http; +using System.Net; + +namespace Eventhub +{ + public class KafkaOutputMany + { + [Function("KafkaOutputMany")] + + public static MultipleOutputTypeForBatch Output( + [HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = null)] HttpRequestData req, + FunctionContext executionContext) + { + var log = executionContext.GetLogger("HttpFunction"); + log.LogInformation("C# HTTP trigger function processed a request."); + var response = req.CreateResponse(HttpStatusCode.OK); + + string[] messages = new string[2]; + messages[0] = "one"; + messages[1] = "two"; + + return new MultipleOutputTypeForBatch() + { + Kevents = messages, + HttpResponse = response + }; + } + } + + public class MultipleOutputTypeForBatch + { + [KafkaOutput("BrokerList", + "topic", + Username = "$ConnectionString", + Password = "EventHubConnectionString", + Protocol = BrokerProtocol.SaslSsl, + 
AuthenticationMode = BrokerAuthenticationMode.Plain + )] + public string[] Kevents { get; set; } + + public HttpResponseData HttpResponse { get; set; } + } +} diff --git a/samples/dotnet-isolated/eventhub/KafkaTrigger.cs b/samples/dotnet-isolated/eventhub/KafkaTrigger.cs new file mode 100644 index 0000000..62062c7 --- /dev/null +++ b/samples/dotnet-isolated/eventhub/KafkaTrigger.cs @@ -0,0 +1,26 @@ +using System; +using Microsoft.Azure.Functions.Worker; +using Microsoft.Extensions.Logging; +using Microsoft.Azure.Functions.Worker.Http; +using Newtonsoft.Json.Linq; + + +namespace Eventhub +{ + public class KafkaTrigger + { + [Function("KafkaTrigger")] + public static void Run( + [KafkaTrigger("BrokerList", + "topic", + Username = "$ConnectionString", + Password = "EventHubConnectionString", + Protocol = BrokerProtocol.SaslSsl, + AuthenticationMode = BrokerAuthenticationMode.Plain, + ConsumerGroup = "$Default")] string eventData, FunctionContext context) + { + var logger = context.GetLogger("KafkaFunction"); + logger.LogInformation($"C# Kafka trigger function processed a message: {JObject.Parse(eventData)["Value"]}"); + } + } +} \ No newline at end of file diff --git a/samples/dotnet-isolated/eventhub/KafkaTriggerMany.cs b/samples/dotnet-isolated/eventhub/KafkaTriggerMany.cs new file mode 100644 index 0000000..b40952d --- /dev/null +++ b/samples/dotnet-isolated/eventhub/KafkaTriggerMany.cs @@ -0,0 +1,30 @@ +using System; +using Microsoft.Azure.Functions.Worker; +using Microsoft.Extensions.Logging; +using Microsoft.Azure.Functions.Worker.Http; +using Newtonsoft.Json.Linq; + + +namespace Eventhub +{ + public class KafkaTriggerMany + { + [Function("KafkaTriggerMany")] + public static void Run( + [KafkaTrigger("BrokerList", + "topic", + Username = "$ConnectionString", + Password = "EventHubConnectionString", + Protocol = BrokerProtocol.SaslSsl, + AuthenticationMode = BrokerAuthenticationMode.Plain, + ConsumerGroup = "$Default", + IsBatched = true)] string[] events, 
FunctionContext context) + { + foreach (var kevent in events) + { + var logger = context.GetLogger("KafkaFunction"); + logger.LogInformation($"C# Kafka trigger function processed a message: {JObject.Parse(kevent)["Value"]}"); + } + } + } +} \ No newline at end of file diff --git a/samples/dotnet-isolated/eventhub/Program.cs b/samples/dotnet-isolated/eventhub/Program.cs new file mode 100644 index 0000000..f958efc --- /dev/null +++ b/samples/dotnet-isolated/eventhub/Program.cs @@ -0,0 +1,19 @@ +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.Hosting; +using Microsoft.Azure.Functions.Worker.Configuration; + +namespace Eventhub +{ + public class Program + { + public static void Main() + { + var host = new HostBuilder() + .ConfigureFunctionsWorkerDefaults() + .Build(); + + host.Run(); + } + } +} \ No newline at end of file diff --git a/samples/dotnet-isolated/eventhub/host.json b/samples/dotnet-isolated/eventhub/host.json new file mode 100644 index 0000000..a5ada92 --- /dev/null +++ b/samples/dotnet-isolated/eventhub/host.json @@ -0,0 +1,16 @@ +{ + "version": "2.0", + "logging": { + "applicationInsights": { + "samplingSettings": { + "isEnabled": true, + "excludedTypes": "Request" + } + } + }, + "extensions": { + "kafka": { + "maxBatchSize": 3 + } + } +} \ No newline at end of file diff --git a/samples/dotnet-isolated/eventhub/local.settings.json.example b/samples/dotnet-isolated/eventhub/local.settings.json.example new file mode 100644 index 0000000..443e707 --- /dev/null +++ b/samples/dotnet-isolated/eventhub/local.settings.json.example @@ -0,0 +1,10 @@ +{ + "IsEncrypted": false, + "Values": { + "AzureWebJobsStorage": "UseDevelopmentStorage=true", + "FUNCTIONS_WORKER_RUNTIME": "dotnet-isolated", + "BrokerList": ".servicebus.windows.net:9093", + "EventHubConnectionString": "", + "topic": "{YOUR_KAFKA_TOPIC_NAME}" + } +} \ No newline at end of file diff --git a/samples/dotnet/Confluent/.gitignore 
b/samples/dotnet/Confluent/.gitignore new file mode 100644 index 0000000..ff5b00c --- /dev/null +++ b/samples/dotnet/Confluent/.gitignore @@ -0,0 +1,264 @@ +## Ignore Visual Studio temporary files, build results, and +## files generated by popular Visual Studio add-ons. + +# Azure Functions localsettings file +local.settings.json + +# User-specific files +*.suo +*.user +*.userosscache +*.sln.docstates + +# User-specific files (MonoDevelop/Xamarin Studio) +*.userprefs + +# Build results +[Dd]ebug/ +[Dd]ebugPublic/ +[Rr]elease/ +[Rr]eleases/ +x64/ +x86/ +bld/ +[Bb]in/ +[Oo]bj/ +[Ll]og/ + +# Visual Studio 2015 cache/options directory +.vs/ +# Uncomment if you have tasks that create the project's static files in wwwroot +#wwwroot/ + +# MSTest test Results +[Tt]est[Rr]esult*/ +[Bb]uild[Ll]og.* + +# NUNIT +*.VisualState.xml +TestResult.xml + +# Build Results of an ATL Project +[Dd]ebugPS/ +[Rr]eleasePS/ +dlldata.c + +# DNX +project.lock.json +project.fragment.lock.json +artifacts/ + +*_i.c +*_p.c +*_i.h +*.ilk +*.meta +*.obj +*.pch +*.pdb +*.pgc +*.pgd +*.rsp +*.sbr +*.tlb +*.tli +*.tlh +*.tmp +*.tmp_proj +*.log +*.vspscc +*.vssscc +.builds +*.pidb +*.svclog +*.scc + +# Chutzpah Test files +_Chutzpah* + +# Visual C++ cache files +ipch/ +*.aps +*.ncb +*.opendb +*.opensdf +*.sdf +*.cachefile +*.VC.db +*.VC.VC.opendb + +# Visual Studio profiler +*.psess +*.vsp +*.vspx +*.sap + +# TFS 2012 Local Workspace +$tf/ + +# Guidance Automation Toolkit +*.gpState + +# ReSharper is a .NET coding add-in +_ReSharper*/ +*.[Rr]e[Ss]harper +*.DotSettings.user + +# JustCode is a .NET coding add-in +.JustCode + +# TeamCity is a build add-in +_TeamCity* + +# DotCover is a Code Coverage Tool +*.dotCover + +# NCrunch +_NCrunch_* +.*crunch*.local.xml +nCrunchTemp_* + +# MightyMoose +*.mm.* +AutoTest.Net/ + +# Web workbench (sass) +.sass-cache/ + +# Installshield output folder +[Ee]xpress/ + +# DocProject is a documentation generator add-in +DocProject/buildhelp/ +DocProject/Help/*.HxT 
+DocProject/Help/*.HxC +DocProject/Help/*.hhc +DocProject/Help/*.hhk +DocProject/Help/*.hhp +DocProject/Help/Html2 +DocProject/Help/html + +# Click-Once directory +publish/ + +# Publish Web Output +*.[Pp]ublish.xml +*.azurePubxml +# TODO: Comment the next line if you want to checkin your web deploy settings +# but database connection strings (with potential passwords) will be unencrypted +#*.pubxml +*.publishproj + +# Microsoft Azure Web App publish settings. Comment the next line if you want to +# checkin your Azure Web App publish settings, but sensitive information contained +# in these scripts will be unencrypted +PublishScripts/ + +# NuGet Packages +*.nupkg +# The packages folder can be ignored because of Package Restore +**/packages/* +# except build/, which is used as an MSBuild target. +!**/packages/build/ +# Uncomment if necessary however generally it will be regenerated when needed +#!**/packages/repositories.config +# NuGet v3's project.json files produces more ignoreable files +*.nuget.props +*.nuget.targets + +# Microsoft Azure Build Output +csx/ +*.build.csdef + +# Microsoft Azure Emulator +ecf/ +rcf/ + +# Windows Store app package directories and files +AppPackages/ +BundleArtifacts/ +Package.StoreAssociation.xml +_pkginfo.txt + +# Visual Studio cache files +# files ending in .cache can be ignored +*.[Cc]ache +# but keep track of directories ending in .cache +!*.[Cc]ache/ + +# Others +ClientBin/ +~$* +*~ +*.dbmdl +*.dbproj.schemaview +*.jfm +*.pfx +*.publishsettings +node_modules/ +orleans.codegen.cs + +# Since there are multiple workflows, uncomment next line to ignore bower_components +# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) +#bower_components/ + +# RIA/Silverlight projects +Generated_Code/ + +# Backup & report files from converting an old project file +# to a newer Visual Studio version. 
Backup files are not needed, +# because we have git ;-) +_UpgradeReport_Files/ +Backup*/ +UpgradeLog*.XML +UpgradeLog*.htm + +# SQL Server files +*.mdf +*.ldf + +# Business Intelligence projects +*.rdl.data +*.bim.layout +*.bim_*.settings + +# Microsoft Fakes +FakesAssemblies/ + +# GhostDoc plugin setting file +*.GhostDoc.xml + +# Node.js Tools for Visual Studio +.ntvs_analysis.dat + +# Visual Studio 6 build log +*.plg + +# Visual Studio 6 workspace options file +*.opt + +# Visual Studio LightSwitch build output +**/*.HTMLClient/GeneratedArtifacts +**/*.DesktopClient/GeneratedArtifacts +**/*.DesktopClient/ModelManifest.xml +**/*.Server/GeneratedArtifacts +**/*.Server/ModelManifest.xml +_Pvt_Extensions + +# Paket dependency manager +.paket/paket.exe +paket-files/ + +# FAKE - F# Make +.fake/ + +# JetBrains Rider +.idea/ +*.sln.iml + +# CodeRush +.cr/ + +# Python Tools for Visual Studio (PTVS) +__pycache__/ +*.pyc \ No newline at end of file diff --git a/samples/dotnet/Confluent/Confluent.csproj b/samples/dotnet/Confluent/Confluent.csproj new file mode 100644 index 0000000..6ea3eed --- /dev/null +++ b/samples/dotnet/Confluent/Confluent.csproj @@ -0,0 +1,20 @@ + + + net6.0 + v4 + + + + + + + + + PreserveNewest + + + PreserveNewest + Never + + + diff --git a/samples/dotnet/Confluent/KafkaOutput.cs b/samples/dotnet/Confluent/KafkaOutput.cs new file mode 100644 index 0000000..bedf0a5 --- /dev/null +++ b/samples/dotnet/Confluent/KafkaOutput.cs @@ -0,0 +1,34 @@ +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Azure.WebJobs; +using Microsoft.Azure.WebJobs.Extensions.Http; +using Microsoft.Azure.WebJobs.Extensions.Kafka; +using Microsoft.Extensions.Logging; + +namespace Confluent +{ + public class KafkaOutput + { + [FunctionName("KafkaOutput")] + public static IActionResult Output( + [HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = null)] HttpRequest req, + [Kafka("BrokerList", + "topic", + Username = "ConfluentCloudUserName", + 
Password = "ConfluentCloudPassword", + Protocol = BrokerProtocol.SaslSsl, + AuthenticationMode = BrokerAuthenticationMode.Plain + )] out string eventData, + ILogger log) + { + log.LogInformation("C# HTTP trigger function processed a request."); + + string message = req.Query["message"]; + + string responseMessage = "Ok"; + eventData = message; + + return new OkObjectResult(responseMessage); + } + } +} \ No newline at end of file diff --git a/samples/dotnet/Confluent/KafkaOutputMany.cs b/samples/dotnet/Confluent/KafkaOutputMany.cs new file mode 100644 index 0000000..5ae2281 --- /dev/null +++ b/samples/dotnet/Confluent/KafkaOutputMany.cs @@ -0,0 +1,31 @@ +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Azure.WebJobs; +using Microsoft.Azure.WebJobs.Extensions.Http; +using Microsoft.Azure.WebJobs.Extensions.Kafka; +using Microsoft.Extensions.Logging; + +namespace Confluent +{ + public class KafkaOutputMany + { + [FunctionName("KafkaOutputMany")] + public static IActionResult Output( + [HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = null)] HttpRequest req, + [Kafka("BrokerList", + "topic", + Username = "ConfluentCloudUserName", + Password = "ConfluentCloudPassword", + Protocol = BrokerProtocol.SaslSsl, + AuthenticationMode = BrokerAuthenticationMode.Plain + )] out KafkaEventData[] eventDataArr, + ILogger log) + { + log.LogInformation("C# HTTP trigger function processed a request."); + eventDataArr = new KafkaEventData[2]; + eventDataArr[0] = new KafkaEventData("one"); + eventDataArr[1] = new KafkaEventData("two"); + return new OkObjectResult("Ok"); + } + } +} \ No newline at end of file diff --git a/samples/dotnet/Confluent/KafkaOutputManyWithHeaders.cs b/samples/dotnet/Confluent/KafkaOutputManyWithHeaders.cs new file mode 100644 index 0000000..9b889f2 --- /dev/null +++ b/samples/dotnet/Confluent/KafkaOutputManyWithHeaders.cs @@ -0,0 +1,33 @@ +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using 
Microsoft.Azure.WebJobs; +using Microsoft.Azure.WebJobs.Extensions.Http; +using Microsoft.Azure.WebJobs.Extensions.Kafka; +using Microsoft.Extensions.Logging; + +namespace Confluent +{ + public class KafkaOutputManyWithHeaders + { + [FunctionName("KafkaOutputManyWithHeaders")] + public static IActionResult Output( + [HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = null)] HttpRequest req, + [Kafka("BrokerList", + "topic", + Username = "ConfluentCloudUserName", + Password = "ConfluentCloudPassword", + Protocol = BrokerProtocol.SaslSsl, + AuthenticationMode = BrokerAuthenticationMode.Plain + )] out KafkaEventData[] eventDataArr, + ILogger log) + { + log.LogInformation("C# HTTP trigger function processed a request."); + eventDataArr = new KafkaEventData[2]; + eventDataArr[0] = new KafkaEventData("one"); + eventDataArr[0].Headers.Add("test", System.Text.Encoding.UTF8.GetBytes("dotnet")); + eventDataArr[1] = new KafkaEventData("two"); + eventDataArr[1].Headers.Add("test1", System.Text.Encoding.UTF8.GetBytes("dotnet")); + return new OkObjectResult("Ok"); + } + } +} \ No newline at end of file diff --git a/samples/dotnet/Confluent/KafkaOutputWithHeaders.cs b/samples/dotnet/Confluent/KafkaOutputWithHeaders.cs new file mode 100644 index 0000000..5b49d0d --- /dev/null +++ b/samples/dotnet/Confluent/KafkaOutputWithHeaders.cs @@ -0,0 +1,33 @@ +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Azure.WebJobs; +using Microsoft.Azure.WebJobs.Extensions.Http; +using Microsoft.Azure.WebJobs.Extensions.Kafka; +using Microsoft.Extensions.Logging; + +namespace Confluent +{ + public class KafkaOutputWithHeaders + { + [FunctionName("KafkaOutputWithHeaders")] + public static IActionResult Output( + [HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = null)] HttpRequest req, + [Kafka("BrokerList", + "topic", + Username = "ConfluentCloudUserName", + Password = "ConfluentCloudPassword", + Protocol = BrokerProtocol.SaslSsl, + AuthenticationMode = 
BrokerAuthenticationMode.Plain + )] out KafkaEventData eventData, + ILogger log) + { + log.LogInformation("C# HTTP trigger function processed a request."); + + string message = req.Query["message"]; + eventData = new KafkaEventData(message); + eventData.Headers.Add("test", System.Text.Encoding.UTF8.GetBytes("dotnet")); + + return new OkObjectResult("Ok"); + } + } +} \ No newline at end of file diff --git a/samples/dotnet/Confluent/KafkaTrigger.cs b/samples/dotnet/Confluent/KafkaTrigger.cs new file mode 100644 index 0000000..a8af52f --- /dev/null +++ b/samples/dotnet/Confluent/KafkaTrigger.cs @@ -0,0 +1,23 @@ +using Microsoft.Azure.WebJobs; +using Microsoft.Azure.WebJobs.Extensions.Kafka; +using Microsoft.Azure.WebJobs.Extensions.Storage; +using Microsoft.Extensions.Logging; + +namespace Confluent +{ + public class KafkaTrigger + { + [FunctionName("KafkaTrigger")] + public static void Run( + [KafkaTrigger("BrokerList", + "topic", + Username = "ConfluentCloudUserName", + Password = "ConfluentCloudPassword", + Protocol = BrokerProtocol.SaslSsl, + AuthenticationMode = BrokerAuthenticationMode.Plain, + ConsumerGroup = "$Default")] KafkaEventData kevent, ILogger log) + { + log.LogInformation($"C# Kafka trigger function processed a message: {kevent.Value}"); + } + } +} \ No newline at end of file diff --git a/samples/dotnet/Confluent/KafkaTriggerMany.cs b/samples/dotnet/Confluent/KafkaTriggerMany.cs new file mode 100644 index 0000000..c6e105d --- /dev/null +++ b/samples/dotnet/Confluent/KafkaTriggerMany.cs @@ -0,0 +1,26 @@ +using Microsoft.Azure.WebJobs; +using Microsoft.Azure.WebJobs.Extensions.Kafka; +using Microsoft.Azure.WebJobs.Extensions.Storage; +using Microsoft.Extensions.Logging; + +namespace Confluent +{ + public class KafkaTriggerMany + { + [FunctionName("KafkaTriggerMany")] + public static void Run( + [KafkaTrigger("BrokerList", + "topic", + Username = "ConfluentCloudUserName", + Password = "ConfluentCloudPassword", + Protocol = BrokerProtocol.SaslSsl, + 
AuthenticationMode = BrokerAuthenticationMode.Plain, + ConsumerGroup = "$Default")] KafkaEventData[] events, ILogger log) + { + foreach (KafkaEventData kevent in events) + { + log.LogInformation($"C# Kafka trigger function processed a message: {kevent.Value}"); + } + } + } +} \ No newline at end of file diff --git a/samples/dotnet/Confluent/KafkaTriggerManyWithHeaders.cs b/samples/dotnet/Confluent/KafkaTriggerManyWithHeaders.cs new file mode 100644 index 0000000..6346c06 --- /dev/null +++ b/samples/dotnet/Confluent/KafkaTriggerManyWithHeaders.cs @@ -0,0 +1,33 @@ +using Microsoft.Azure.WebJobs; +using Microsoft.Azure.WebJobs.Extensions.Kafka; +using Microsoft.Azure.WebJobs.Extensions.Storage; +using Microsoft.Extensions.Logging; +using System.Collections.Generic; + +namespace Confluent +{ + public class KafkaTriggerManyWithHeaders + { + [FunctionName("KafkaTriggerManyWithHeaders")] + public static void Run( + [KafkaTrigger("BrokerList", + "topic", + Username = "ConfluentCloudUserName", + Password = "ConfluentCloudPassword", + Protocol = BrokerProtocol.SaslSsl, + AuthenticationMode = BrokerAuthenticationMode.Plain, + ConsumerGroup = "$Default")] KafkaEventData[] events, ILogger log) + { + foreach (KafkaEventData eventData in events) + { + log.LogInformation($"C# Kafka trigger function processed a message: {eventData.Value}"); + log.LogInformation($"Headers: "); + var headers = eventData.Headers; + foreach (var header in headers) + { + log.LogInformation($"Key = {header.Key} Value = {System.Text.Encoding.UTF8.GetString(header.Value)}"); + } + } + } + } +} \ No newline at end of file diff --git a/samples/dotnet/Confluent/KafkaTriggerWithHeaders.cs b/samples/dotnet/Confluent/KafkaTriggerWithHeaders.cs new file mode 100644 index 0000000..fc7add4 --- /dev/null +++ b/samples/dotnet/Confluent/KafkaTriggerWithHeaders.cs @@ -0,0 +1,29 @@ +using Microsoft.Azure.WebJobs; +using Microsoft.Azure.WebJobs.Extensions.Kafka; +using Microsoft.Azure.WebJobs.Extensions.Storage; +using 
Microsoft.Extensions.Logging; + +namespace Confluent +{ + public class KafkaTriggerSingleWithHeaders + { + [FunctionName("KafkaTriggerSingleWithHeaders")] + public static void Run( + [KafkaTrigger("BrokerList", + "topic", + Username = "ConfluentCloudUserName", + Password = "ConfluentCloudPassword", + Protocol = BrokerProtocol.SaslSsl, + AuthenticationMode = BrokerAuthenticationMode.Plain, + ConsumerGroup = "$Default")] KafkaEventData kevent, ILogger log) + { + log.LogInformation($"C# Kafka trigger function processed a message: {kevent.Value}"); + log.LogInformation("Headers: "); + var headers = kevent.Headers; + foreach (var header in headers) + { + log.LogInformation($"Key = {header.Key} Value = {System.Text.Encoding.UTF8.GetString(header.Value)}"); + } + } + } +} \ No newline at end of file diff --git a/samples/dotnet/Confluent/host.json b/samples/dotnet/Confluent/host.json new file mode 100644 index 0000000..beb2e40 --- /dev/null +++ b/samples/dotnet/Confluent/host.json @@ -0,0 +1,11 @@ +{ + "version": "2.0", + "logging": { + "applicationInsights": { + "samplingSettings": { + "isEnabled": true, + "excludedTypes": "Request" + } + } + } +} \ No newline at end of file diff --git a/samples/dotnet/Confluent/local.settings.json.example b/samples/dotnet/Confluent/local.settings.json.example new file mode 100644 index 0000000..c519ed1 --- /dev/null +++ b/samples/dotnet/Confluent/local.settings.json.example @@ -0,0 +1,11 @@ +{ + "IsEncrypted": false, + "Values": { + "AzureWebJobsStorage": "UseDevelopmentStorage=true", + "FUNCTIONS_WORKER_RUNTIME": "dotnet", + "BrokerList": "{YOUR_CONFLUENT_CLOUD_BROKER}", + "ConfluentCloudUserName": "{YOUR_CONFLUENT_CLOUD_USERNAME}", + "ConfluentCloudPassword": "{YOUR_CONFLUENT_CLOUD_PASSWORD}", + "topic": "{YOUR_KAFKA_TOPIC_NAME}" + } +} \ No newline at end of file diff --git a/samples/dotnet/EventHub/.gitignore b/samples/dotnet/EventHub/.gitignore new file mode 100644 index 0000000..ff5b00c --- /dev/null +++ 
b/samples/dotnet/EventHub/.gitignore @@ -0,0 +1,264 @@ +## Ignore Visual Studio temporary files, build results, and +## files generated by popular Visual Studio add-ons. + +# Azure Functions localsettings file +local.settings.json + +# User-specific files +*.suo +*.user +*.userosscache +*.sln.docstates + +# User-specific files (MonoDevelop/Xamarin Studio) +*.userprefs + +# Build results +[Dd]ebug/ +[Dd]ebugPublic/ +[Rr]elease/ +[Rr]eleases/ +x64/ +x86/ +bld/ +[Bb]in/ +[Oo]bj/ +[Ll]og/ + +# Visual Studio 2015 cache/options directory +.vs/ +# Uncomment if you have tasks that create the project's static files in wwwroot +#wwwroot/ + +# MSTest test Results +[Tt]est[Rr]esult*/ +[Bb]uild[Ll]og.* + +# NUNIT +*.VisualState.xml +TestResult.xml + +# Build Results of an ATL Project +[Dd]ebugPS/ +[Rr]eleasePS/ +dlldata.c + +# DNX +project.lock.json +project.fragment.lock.json +artifacts/ + +*_i.c +*_p.c +*_i.h +*.ilk +*.meta +*.obj +*.pch +*.pdb +*.pgc +*.pgd +*.rsp +*.sbr +*.tlb +*.tli +*.tlh +*.tmp +*.tmp_proj +*.log +*.vspscc +*.vssscc +.builds +*.pidb +*.svclog +*.scc + +# Chutzpah Test files +_Chutzpah* + +# Visual C++ cache files +ipch/ +*.aps +*.ncb +*.opendb +*.opensdf +*.sdf +*.cachefile +*.VC.db +*.VC.VC.opendb + +# Visual Studio profiler +*.psess +*.vsp +*.vspx +*.sap + +# TFS 2012 Local Workspace +$tf/ + +# Guidance Automation Toolkit +*.gpState + +# ReSharper is a .NET coding add-in +_ReSharper*/ +*.[Rr]e[Ss]harper +*.DotSettings.user + +# JustCode is a .NET coding add-in +.JustCode + +# TeamCity is a build add-in +_TeamCity* + +# DotCover is a Code Coverage Tool +*.dotCover + +# NCrunch +_NCrunch_* +.*crunch*.local.xml +nCrunchTemp_* + +# MightyMoose +*.mm.* +AutoTest.Net/ + +# Web workbench (sass) +.sass-cache/ + +# Installshield output folder +[Ee]xpress/ + +# DocProject is a documentation generator add-in +DocProject/buildhelp/ +DocProject/Help/*.HxT +DocProject/Help/*.HxC +DocProject/Help/*.hhc +DocProject/Help/*.hhk +DocProject/Help/*.hhp 
+DocProject/Help/Html2 +DocProject/Help/html + +# Click-Once directory +publish/ + +# Publish Web Output +*.[Pp]ublish.xml +*.azurePubxml +# TODO: Comment the next line if you want to checkin your web deploy settings +# but database connection strings (with potential passwords) will be unencrypted +#*.pubxml +*.publishproj + +# Microsoft Azure Web App publish settings. Comment the next line if you want to +# checkin your Azure Web App publish settings, but sensitive information contained +# in these scripts will be unencrypted +PublishScripts/ + +# NuGet Packages +*.nupkg +# The packages folder can be ignored because of Package Restore +**/packages/* +# except build/, which is used as an MSBuild target. +!**/packages/build/ +# Uncomment if necessary however generally it will be regenerated when needed +#!**/packages/repositories.config +# NuGet v3's project.json files produces more ignoreable files +*.nuget.props +*.nuget.targets + +# Microsoft Azure Build Output +csx/ +*.build.csdef + +# Microsoft Azure Emulator +ecf/ +rcf/ + +# Windows Store app package directories and files +AppPackages/ +BundleArtifacts/ +Package.StoreAssociation.xml +_pkginfo.txt + +# Visual Studio cache files +# files ending in .cache can be ignored +*.[Cc]ache +# but keep track of directories ending in .cache +!*.[Cc]ache/ + +# Others +ClientBin/ +~$* +*~ +*.dbmdl +*.dbproj.schemaview +*.jfm +*.pfx +*.publishsettings +node_modules/ +orleans.codegen.cs + +# Since there are multiple workflows, uncomment next line to ignore bower_components +# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) +#bower_components/ + +# RIA/Silverlight projects +Generated_Code/ + +# Backup & report files from converting an old project file +# to a newer Visual Studio version. 
Backup files are not needed, +# because we have git ;-) +_UpgradeReport_Files/ +Backup*/ +UpgradeLog*.XML +UpgradeLog*.htm + +# SQL Server files +*.mdf +*.ldf + +# Business Intelligence projects +*.rdl.data +*.bim.layout +*.bim_*.settings + +# Microsoft Fakes +FakesAssemblies/ + +# GhostDoc plugin setting file +*.GhostDoc.xml + +# Node.js Tools for Visual Studio +.ntvs_analysis.dat + +# Visual Studio 6 build log +*.plg + +# Visual Studio 6 workspace options file +*.opt + +# Visual Studio LightSwitch build output +**/*.HTMLClient/GeneratedArtifacts +**/*.DesktopClient/GeneratedArtifacts +**/*.DesktopClient/ModelManifest.xml +**/*.Server/GeneratedArtifacts +**/*.Server/ModelManifest.xml +_Pvt_Extensions + +# Paket dependency manager +.paket/paket.exe +paket-files/ + +# FAKE - F# Make +.fake/ + +# JetBrains Rider +.idea/ +*.sln.iml + +# CodeRush +.cr/ + +# Python Tools for Visual Studio (PTVS) +__pycache__/ +*.pyc \ No newline at end of file diff --git a/samples/dotnet/EventHub/EventHub.csproj b/samples/dotnet/EventHub/EventHub.csproj new file mode 100644 index 0000000..6ea3eed --- /dev/null +++ b/samples/dotnet/EventHub/EventHub.csproj @@ -0,0 +1,20 @@ + + + net6.0 + v4 + + + + + + + + + PreserveNewest + + + PreserveNewest + Never + + + diff --git a/samples/dotnet/EventHub/KafkaOutput.cs b/samples/dotnet/EventHub/KafkaOutput.cs new file mode 100644 index 0000000..078e706 --- /dev/null +++ b/samples/dotnet/EventHub/KafkaOutput.cs @@ -0,0 +1,34 @@ +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Azure.WebJobs; +using Microsoft.Azure.WebJobs.Extensions.Http; +using Microsoft.Azure.WebJobs.Extensions.Kafka; +using Microsoft.Extensions.Logging; + +namespace Eventhub +{ + public class KafkaOutput + { + [FunctionName("KafkaOutput")] + public static IActionResult Output( + [HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = null)] HttpRequest req, + [Kafka("BrokerList", + "topic", + Username = "$ConnectionString", + Password = 
"%EventHubConnectionString%", + Protocol = BrokerProtocol.SaslSsl, + AuthenticationMode = BrokerAuthenticationMode.Plain + )] out string eventData, + ILogger log) + { + log.LogInformation("C# HTTP trigger function processed a request."); + + string message = req.Query["message"]; + + string responseMessage = "Ok"; + eventData = message; + + return new OkObjectResult(responseMessage); + } + } +} \ No newline at end of file diff --git a/samples/dotnet/EventHub/KafkaOutputMany.cs b/samples/dotnet/EventHub/KafkaOutputMany.cs new file mode 100644 index 0000000..52d425f --- /dev/null +++ b/samples/dotnet/EventHub/KafkaOutputMany.cs @@ -0,0 +1,31 @@ +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Azure.WebJobs; +using Microsoft.Azure.WebJobs.Extensions.Http; +using Microsoft.Azure.WebJobs.Extensions.Kafka; +using Microsoft.Extensions.Logging; + +namespace Eventhub +{ + public class KafkaOutputMany + { + [FunctionName("KafkaOutputMany")] + public static IActionResult Output( + [HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = null)] HttpRequest req, + [Kafka("BrokerList", + "topic", + Username = "$ConnectionString", + Password = "%EventHubConnectionString%", + Protocol = BrokerProtocol.SaslSsl, + AuthenticationMode = BrokerAuthenticationMode.Plain + )] out KafkaEventData[] eventDataArr, + ILogger log) + { + log.LogInformation("C# HTTP trigger function processed a request."); + eventDataArr = new KafkaEventData[2]; + eventDataArr[0] = new KafkaEventData("one"); + eventDataArr[1] = new KafkaEventData("two"); + return new OkObjectResult("Ok"); + } + } +} \ No newline at end of file diff --git a/samples/dotnet/EventHub/KafkaOutputManyWithHeaders.cs b/samples/dotnet/EventHub/KafkaOutputManyWithHeaders.cs new file mode 100644 index 0000000..02487e8 --- /dev/null +++ b/samples/dotnet/EventHub/KafkaOutputManyWithHeaders.cs @@ -0,0 +1,33 @@ +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Azure.WebJobs; 
+using Microsoft.Azure.WebJobs.Extensions.Http; +using Microsoft.Azure.WebJobs.Extensions.Kafka; +using Microsoft.Extensions.Logging; + +namespace Eventhub +{ + public class KafkaOutputManyWithHeaders + { + [FunctionName("KafkaOutputManyWithHeaders")] + public static IActionResult Output( + [HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = null)] HttpRequest req, + [Kafka("BrokerList", + "topic", + Username = "$ConnectionString", + Password = "%EventHubConnectionString%", + Protocol = BrokerProtocol.SaslSsl, + AuthenticationMode = BrokerAuthenticationMode.Plain + )] out KafkaEventData[] eventDataArr, + ILogger log) + { + log.LogInformation("C# HTTP trigger function processed a request."); + eventDataArr = new KafkaEventData[2]; + eventDataArr[0] = new KafkaEventData("one"); + eventDataArr[0].Headers.Add("test", System.Text.Encoding.UTF8.GetBytes("dotnet")); + eventDataArr[1] = new KafkaEventData("two"); + eventDataArr[1].Headers.Add("test1", System.Text.Encoding.UTF8.GetBytes("dotnet")); + return new OkObjectResult("Ok"); + } + } +} \ No newline at end of file diff --git a/samples/dotnet/EventHub/KafkaOutputWithHeaders.cs b/samples/dotnet/EventHub/KafkaOutputWithHeaders.cs new file mode 100644 index 0000000..71d0977 --- /dev/null +++ b/samples/dotnet/EventHub/KafkaOutputWithHeaders.cs @@ -0,0 +1,33 @@ +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Azure.WebJobs; +using Microsoft.Azure.WebJobs.Extensions.Http; +using Microsoft.Azure.WebJobs.Extensions.Kafka; +using Microsoft.Extensions.Logging; + +namespace Eventhub +{ + public class KafkaOutputWithHeaders + { + [FunctionName("KafkaOutputWithHeaders")] + public static IActionResult Output( + [HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = null)] HttpRequest req, + [Kafka("BrokerList", + "topic", + Username = "$ConnectionString", + Password = "%EventHubConnectionString%", + Protocol = BrokerProtocol.SaslSsl, + AuthenticationMode = BrokerAuthenticationMode.Plain + 
)] out KafkaEventData eventData, + ILogger log) + { + log.LogInformation("C# HTTP trigger function processed a request."); + + string message = req.Query["message"]; + eventData = new KafkaEventData(message); + eventData.Headers.Add("test", System.Text.Encoding.UTF8.GetBytes("dotnet")); + + return new OkObjectResult("Ok"); + } + } +} \ No newline at end of file diff --git a/samples/dotnet/EventHub/KafkaTrigger.cs b/samples/dotnet/EventHub/KafkaTrigger.cs new file mode 100644 index 0000000..2eb6584 --- /dev/null +++ b/samples/dotnet/EventHub/KafkaTrigger.cs @@ -0,0 +1,23 @@ +using Microsoft.Azure.WebJobs; +using Microsoft.Azure.WebJobs.Extensions.Kafka; +using Microsoft.Azure.WebJobs.Extensions.Storage; +using Microsoft.Extensions.Logging; + +namespace Eventhub +{ + public class KafkaTrigger + { + [FunctionName("KafkaTrigger")] + public static void Run( + [KafkaTrigger("BrokerList", + "topic", + Username = "$ConnectionString", + Password = "%EventHubConnectionString%", + Protocol = BrokerProtocol.SaslSsl, + AuthenticationMode = BrokerAuthenticationMode.Plain, + ConsumerGroup = "$Default")] KafkaEventData kevent, ILogger log) + { + log.LogInformation($"C# Kafka trigger function processed a message: {kevent.Value}"); + } + } +} \ No newline at end of file diff --git a/samples/dotnet/EventHub/KafkaTriggerMany.cs b/samples/dotnet/EventHub/KafkaTriggerMany.cs new file mode 100644 index 0000000..6e21da9 --- /dev/null +++ b/samples/dotnet/EventHub/KafkaTriggerMany.cs @@ -0,0 +1,26 @@ +using Microsoft.Azure.WebJobs; +using Microsoft.Azure.WebJobs.Extensions.Kafka; +using Microsoft.Azure.WebJobs.Extensions.Storage; +using Microsoft.Extensions.Logging; + +namespace Eventhub +{ + public class KafkaTriggerMany + { + [FunctionName("KafkaTriggerMany")] + public static void Run( + [KafkaTrigger("BrokerList", + "topic", + Username = "$ConnectionString", + Password = "%EventHubConnectionString%", + Protocol = BrokerProtocol.SaslSsl, + AuthenticationMode = 
BrokerAuthenticationMode.Plain, + ConsumerGroup = "$Default")] KafkaEventData[] events, ILogger log) + { + foreach (KafkaEventData kevent in events) + { + log.LogInformation($"C# Kafka trigger function processed a message: {kevent.Value}"); + } + } + } +} \ No newline at end of file diff --git a/samples/dotnet/EventHub/KafkaTriggerManyWithHeaders.cs b/samples/dotnet/EventHub/KafkaTriggerManyWithHeaders.cs new file mode 100644 index 0000000..fd36a13 --- /dev/null +++ b/samples/dotnet/EventHub/KafkaTriggerManyWithHeaders.cs @@ -0,0 +1,33 @@ +using Microsoft.Azure.WebJobs; +using Microsoft.Azure.WebJobs.Extensions.Kafka; +using Microsoft.Azure.WebJobs.Extensions.Storage; +using Microsoft.Extensions.Logging; +using System.Collections.Generic; + +namespace Eventhub +{ + public class KafkaTriggerManyWithHeaders + { + [FunctionName("KafkaTriggerManyWithHeaders")] + public static void Run( + [KafkaTrigger("BrokerList", + "topic", + Username = "$ConnectionString", + Password = "%EventHubConnectionString%", + Protocol = BrokerProtocol.SaslSsl, + AuthenticationMode = BrokerAuthenticationMode.Plain, + ConsumerGroup = "$Default")] KafkaEventData[] events, ILogger log) + { + foreach (KafkaEventData eventData in events) + { + log.LogInformation($"C# Kafka trigger function processed a message: {eventData.Value}"); + log.LogInformation($"Headers: "); + var headers = eventData.Headers; + foreach (var header in headers) + { + log.LogInformation($"Key = {header.Key} Value = {System.Text.Encoding.UTF8.GetString(header.Value)}"); + } + } + } + } +} \ No newline at end of file diff --git a/samples/dotnet/EventHub/KafkaTriggerWithHeaders.cs b/samples/dotnet/EventHub/KafkaTriggerWithHeaders.cs new file mode 100644 index 0000000..28c1740 --- /dev/null +++ b/samples/dotnet/EventHub/KafkaTriggerWithHeaders.cs @@ -0,0 +1,29 @@ +using Microsoft.Azure.WebJobs; +using Microsoft.Azure.WebJobs.Extensions.Kafka; +using Microsoft.Azure.WebJobs.Extensions.Storage; +using Microsoft.Extensions.Logging; 
+ +namespace Eventhub +{ + public class KafkaTriggerSingleWithHeaders + { + [FunctionName("KafkaTriggerSingleWithHeaders")] + public static void Run( + [KafkaTrigger("BrokerList", + "topic", + Username = "$ConnectionString", + Password = "%EventHubConnectionString%", + Protocol = BrokerProtocol.SaslSsl, + AuthenticationMode = BrokerAuthenticationMode.Plain, + ConsumerGroup = "$Default")] KafkaEventData kevent, ILogger log) + { + log.LogInformation($"C# Kafka trigger function processed a message: {kevent.Value}"); + log.LogInformation("Headers: "); + var headers = kevent.Headers; + foreach (var header in headers) + { + log.LogInformation($"Key = {header.Key} Value = {System.Text.Encoding.UTF8.GetString(header.Value)}"); + } + } + } +} \ No newline at end of file diff --git a/samples/dotnet/EventHub/host.json b/samples/dotnet/EventHub/host.json new file mode 100644 index 0000000..81e35b7 --- /dev/null +++ b/samples/dotnet/EventHub/host.json @@ -0,0 +1,3 @@ +{ + "version": "2.0" +} \ No newline at end of file diff --git a/samples/dotnet/EventHub/local.settings.json.example b/samples/dotnet/EventHub/local.settings.json.example new file mode 100644 index 0000000..57e41ad --- /dev/null +++ b/samples/dotnet/EventHub/local.settings.json.example @@ -0,0 +1,10 @@ +{ + "IsEncrypted": false, + "Values": { + "AzureWebJobsStorage": "UseDevelopmentStorage=true", + "FUNCTIONS_WORKER_RUNTIME": "dotnet", + "BrokerList": ".servicebus.windows.net:9093", + "EventHubConnectionString": "", + "topic": "{YOUR_KAFKA_TOPIC_NAME}" + } +} \ No newline at end of file diff --git a/samples/java/confluent/extensions.csproj b/samples/java/confluent/extensions.csproj index 3e64f47..4bf4251 100644 --- a/samples/java/confluent/extensions.csproj +++ b/samples/java/confluent/extensions.csproj @@ -5,12 +5,12 @@ ** - + - + diff --git a/samples/java/confluent/local.settings.json.example b/samples/java/confluent/local.settings.json.example index 5c67a89..1b159f9 100644 --- 
a/samples/java/confluent/local.settings.json.example +++ b/samples/java/confluent/local.settings.json.example @@ -1,8 +1,9 @@ { "IsEncrypted": false, "Values": { - "BrokerList": "YOUR_BROKER_LIST_HERE", - "ConfluentCloudUsername": "YOUR_CONFLUENT_USER_NAME_HERE", - "ConfluentCloudPassword": "YOUR_CONFLUENT_PASSWORD_HERE" + "BrokerList": "YOUR_BROKER_LIST_HERE", + "ConfluentCloudUsername": "YOUR_CONFLUENT_USER_NAME_HERE", + "ConfluentCloudPassword": "YOUR_CONFLUENT_PASSWORD_HERE", + "topic": "{YOUR_KAFKA_TOPIC_NAME}" } } \ No newline at end of file diff --git a/samples/java/confluent/pom.xml b/samples/java/confluent/pom.xml index 418891b..1e57d8f 100644 --- a/samples/java/confluent/pom.xml +++ b/samples/java/confluent/pom.xml @@ -14,8 +14,8 @@ UTF-8 1.8 1.8 - 1.8.0 - 1.4.0 + 1.18.0 + 2.0.0 kafka-function-20190419163130420 westus ${project.build.directory}/azure-functions/${functionAppName} @@ -52,16 +52,6 @@ - - junit - junit - 4.13.1 - - - org.mockito - mockito-core - 2.4.0 - com.microsoft.azure.functions azure-functions-java-library @@ -75,17 +65,10 @@ com.microsoft.azure.functions azure-functions-java-library - - - junit - junit - test - - - org.mockito - mockito-core - test + com.google.code.gson + gson + 2.9.0 diff --git a/samples/java/confluent/pom_linux.xml b/samples/java/confluent/pom_linux.xml index f70be03..08295bb 100644 --- a/samples/java/confluent/pom_linux.xml +++ b/samples/java/confluent/pom_linux.xml @@ -14,8 +14,8 @@ UTF-8 1.8 1.8 - 1.8.0 - 1.4.0 + 1.18.0 + 2.0.0 kafka-function-20190419163130420 westus ${project.build.directory}/azure-functions/${functionAppName} diff --git a/samples/java/confluent/pom_windows.xml b/samples/java/confluent/pom_windows.xml index 336d505..e874b9d 100644 --- a/samples/java/confluent/pom_windows.xml +++ b/samples/java/confluent/pom_windows.xml @@ -14,8 +14,8 @@ UTF-8 1.8 1.8 - 1.8.0 - 1.4.0 + 1.18.0 + 2.0.0 kafka-function-20190419163130420 westus ${project.build.directory}/azure-functions/${functionAppName} diff --git 
a/samples/java/confluent/src/main/java/com/contoso/kafka/KafkaOutputMany.java b/samples/java/confluent/src/main/java/com/contoso/kafka/KafkaOutputMany.java new file mode 100644 index 0000000..bbfa06e --- /dev/null +++ b/samples/java/confluent/src/main/java/com/contoso/kafka/KafkaOutputMany.java @@ -0,0 +1,31 @@ +package com.contoso.kafka; + +import java.util.*; +import com.microsoft.azure.functions.annotation.*; +import com.microsoft.azure.functions.*; + +import java.util.Optional; + +public class KafkaOutputMany { + @FunctionName("KafkaOutputMany") + public HttpResponseMessage run( + @HttpTrigger(name = "req", methods = {HttpMethod.GET, HttpMethod.POST}, authLevel = AuthorizationLevel.ANONYMOUS) HttpRequestMessage> request, + @KafkaOutput( + name = "kafkaOutput", + topic = "topic", + brokerList="%BrokerList%", + username = "%ConfluentCloudUsername%", + password = "ConfluentCloudPassword", + authenticationMode = BrokerAuthenticationMode.PLAIN, + // sslCaLocation = "confluent_cloud_cacert.pem", // Enable this line for windows. 
+ protocol = BrokerProtocol.SASLSSL + ) OutputBinding output, + final ExecutionContext context) { + context.getLogger().info("Java HTTP trigger processed a request."); + String[] messages = new String[2]; + messages[0] = "one"; + messages[1] = "two"; + output.setValue(messages); + return request.createResponseBuilder(HttpStatus.OK).body("Ok").build(); + } +} diff --git a/samples/java/confluent/src/main/java/com/contoso/kafka/KafkaOutputManyWithHeaders.java b/samples/java/confluent/src/main/java/com/contoso/kafka/KafkaOutputManyWithHeaders.java new file mode 100644 index 0000000..caa6e74 --- /dev/null +++ b/samples/java/confluent/src/main/java/com/contoso/kafka/KafkaOutputManyWithHeaders.java @@ -0,0 +1,40 @@ +package com.contoso.kafka; + +import com.microsoft.azure.functions.annotation.*; +import com.contoso.kafka.entity.KafkaEntity; +import com.contoso.kafka.entity.KafkaHeaders; +import com.microsoft.azure.functions.*; + +import java.util.Optional; + +public class KafkaOutputManyWithHeaders { + @FunctionName("KafkaOutputManyWithHeaders") + public HttpResponseMessage run( + @HttpTrigger(name = "req", methods = {HttpMethod.GET, HttpMethod.POST}, authLevel = AuthorizationLevel.ANONYMOUS) HttpRequestMessage> request, + @KafkaOutput( + name = "kafkaOutput", + topic = "topic", + brokerList="%BrokerList%", + username = "%ConfluentCloudUsername%", + password = "ConfluentCloudPassword", + authenticationMode = BrokerAuthenticationMode.PLAIN, + // sslCaLocation = "confluent_cloud_cacert.pem", // Enable this line for windows. 
+ protocol = BrokerProtocol.SASLSSL + ) OutputBinding output, + final ExecutionContext context) { + context.getLogger().info("Java HTTP trigger processed a request."); + KafkaEntity[] kevents = new KafkaEntity[2]; + KafkaHeaders[] headersForEvent1 = new KafkaHeaders[1]; + headersForEvent1[0] = new KafkaHeaders("test", "java"); + KafkaEntity kevent1 = new KafkaEntity(364, 0, "topic", "2022-04-09T03:20:06.591Z", "one", headersForEvent1); + + KafkaHeaders[] headersForEvent2 = new KafkaHeaders[1]; + headersForEvent2[0] = new KafkaHeaders("test1", "java"); + KafkaEntity kevent2 = new KafkaEntity(364, 0, "topic", "2022-04-09T03:20:06.591Z", "two", headersForEvent2); + + kevents[0] = kevent1; + kevents[1] = kevent2; + output.setValue(kevents); + return request.createResponseBuilder(HttpStatus.OK).body("Ok").build(); + } +} diff --git a/samples/java/confluent/src/main/java/com/contoso/kafka/KafkaOutputWithHeaders.java b/samples/java/confluent/src/main/java/com/contoso/kafka/KafkaOutputWithHeaders.java new file mode 100644 index 0000000..91a4a0a --- /dev/null +++ b/samples/java/confluent/src/main/java/com/contoso/kafka/KafkaOutputWithHeaders.java @@ -0,0 +1,36 @@ +package com.contoso.kafka; + +import com.microsoft.azure.functions.annotation.*; +import com.contoso.kafka.entity.KafkaEntity; +import com.contoso.kafka.entity.KafkaHeaders; +import com.microsoft.azure.functions.*; + +import java.util.Optional; + +public class KafkaOutputWithHeaders { + @FunctionName("KafkaOutputWithHeaders") + public HttpResponseMessage run( + @HttpTrigger(name = "req", methods = {HttpMethod.GET, HttpMethod.POST}, authLevel = AuthorizationLevel.ANONYMOUS) HttpRequestMessage> request, + @KafkaOutput( + name = "kafkaOutput", + topic = "topic", + brokerList="%BrokerList%", + username = "%ConfluentCloudUsername%", + password = "ConfluentCloudPassword", + authenticationMode = BrokerAuthenticationMode.PLAIN, + // sslCaLocation = "confluent_cloud_cacert.pem", // Enable this line for windows. 
+ protocol = BrokerProtocol.SASLSSL + ) OutputBinding output, + final ExecutionContext context) { + context.getLogger().info("Java HTTP trigger processed a request."); + + // Parse query parameter + String query = request.getQueryParameters().get("message"); + String message = request.getBody().orElse(query); + KafkaHeaders[] headers = new KafkaHeaders[1]; + headers[0] = new KafkaHeaders("test", "java"); + KafkaEntity kevent = new KafkaEntity(364, 0, "topic", "2022-04-09T03:20:06.591Z", message, headers); + output.setValue(kevent); + return request.createResponseBuilder(HttpStatus.OK).body("Ok").build(); + } +} diff --git a/samples/java/confluent/src/main/java/com/contoso/kafka/KafkaTriggerMany.java b/samples/java/confluent/src/main/java/com/contoso/kafka/KafkaTriggerMany.java new file mode 100644 index 0000000..3a159a2 --- /dev/null +++ b/samples/java/confluent/src/main/java/com/contoso/kafka/KafkaTriggerMany.java @@ -0,0 +1,28 @@ +package com.contoso.kafka; + +import java.util.*; +import com.microsoft.azure.functions.annotation.*; +import com.microsoft.azure.functions.*; + +public class KafkaTriggerMany { + @FunctionName("KafkaTriggerMany") + public void runMany( + @KafkaTrigger( + name = "kafkaTriggerMany", + topic = "topic", + brokerList="%BrokerList%", + consumerGroup="$Default", + username = "%ConfluentCloudUsername%", + password = "ConfluentCloudPassword", + authenticationMode = BrokerAuthenticationMode.PLAIN, + protocol = BrokerProtocol.SASLSSL, + // sslCaLocation = "confluent_cloud_cacert.pem", // Enable this line for windows. 
+ cardinality = Cardinality.MANY, + dataType = "string" + ) String[] kafkaEvents, + final ExecutionContext context) { + for (String kevent: kafkaEvents) { + context.getLogger().info(kevent); + } + } +} diff --git a/samples/java/confluent/src/main/java/com/contoso/kafka/KafkaTriggerManyWithHeaders.java b/samples/java/confluent/src/main/java/com/contoso/kafka/KafkaTriggerManyWithHeaders.java new file mode 100644 index 0000000..adfe8f5 --- /dev/null +++ b/samples/java/confluent/src/main/java/com/contoso/kafka/KafkaTriggerManyWithHeaders.java @@ -0,0 +1,39 @@ +package com.contoso.kafka; + +import java.util.*; +import com.microsoft.azure.functions.annotation.*; + +import com.contoso.kafka.entity.KafkaEntity; +import com.contoso.kafka.entity.KafkaHeaders; +import com.google.gson.Gson; +import com.microsoft.azure.functions.*; + +public class KafkaTriggerManyWithHeaders { + @FunctionName("KafkaTriggerManyWithHeaders") + public void runSingle( + @KafkaTrigger( + name = "KafkaTrigger", + topic = "topic", + brokerList="%BrokerList%", + consumerGroup="$Default", + username = "%ConfluentCloudUsername%", + password = "ConfluentCloudPassword", + authenticationMode = BrokerAuthenticationMode.PLAIN, + protocol = BrokerProtocol.SASLSSL, + // sslCaLocation = "confluent_cloud_cacert.pem", // Enable this line for windows. 
+ dataType = "string", + cardinality = Cardinality.MANY + ) List kafkaEvents, + final ExecutionContext context) { + Gson gson = new Gson(); + for (String keventstr: kafkaEvents) { + KafkaEntity kevent = gson.fromJson(keventstr, KafkaEntity.class); + context.getLogger().info("Java Kafka trigger function called for message: " + kevent.Value); + context.getLogger().info("Headers for the message:"); + for (KafkaHeaders header : kevent.Headers) { + String decodedValue = new String(Base64.getDecoder().decode(header.Value)); + context.getLogger().info("Key:" + header.Key + " Value:" + decodedValue); + } + } + } +} diff --git a/samples/java/confluent/src/main/java/com/contoso/kafka/KafkaTriggerWithHeaders.java b/samples/java/confluent/src/main/java/com/contoso/kafka/KafkaTriggerWithHeaders.java new file mode 100644 index 0000000..9314e5d --- /dev/null +++ b/samples/java/confluent/src/main/java/com/contoso/kafka/KafkaTriggerWithHeaders.java @@ -0,0 +1,32 @@ +package com.contoso.kafka; + +import java.util.*; +import com.microsoft.azure.functions.annotation.*; +import com.contoso.kafka.entity.KafkaEntity; +import com.contoso.kafka.entity.KafkaHeaders; +import com.microsoft.azure.functions.*; + +public class KafkaTriggerWithHeaders { + @FunctionName("KafkaTriggerWithHeaders") + public void runSingle( + @KafkaTrigger( + name = "KafkaTrigger", + topic = "topic", + brokerList="%BrokerList%", + consumerGroup="$Default", + username = "%ConfluentCloudUsername%", + password = "ConfluentCloudPassword", + authenticationMode = BrokerAuthenticationMode.PLAIN, + protocol = BrokerProtocol.SASLSSL, + // sslCaLocation = "confluent_cloud_cacert.pem", // Enable this line for windows. 
+ dataType = "string" + ) KafkaEntity kafkaEventData, + final ExecutionContext context) { + context.getLogger().info("Java Kafka trigger function called for message: " + kafkaEventData.Value); + context.getLogger().info("Headers for the message:"); + for (KafkaHeaders header : kafkaEventData.Headers) { + String decodedValue = new String(Base64.getDecoder().decode(header.Value)); + context.getLogger().info("Key:" + header.Key + " Value:" + decodedValue); + } + } +} diff --git a/samples/java/confluent/src/main/java/com/contoso/kafka/FunctionOutput.java b/samples/java/confluent/src/main/java/com/contoso/kafka/SampleKafkaOutput.java similarity index 73% rename from samples/java/confluent/src/main/java/com/contoso/kafka/FunctionOutput.java rename to samples/java/confluent/src/main/java/com/contoso/kafka/SampleKafkaOutput.java index c9988b7..541a13c 100644 --- a/samples/java/confluent/src/main/java/com/contoso/kafka/FunctionOutput.java +++ b/samples/java/confluent/src/main/java/com/contoso/kafka/SampleKafkaOutput.java @@ -6,23 +6,23 @@ import com.microsoft.azure.functions.*; import java.util.Optional; -public class FunctionOutput { +public class SampleKafkaOutput { /** - * This function listens at endpoint "api/KafkaInput-Java" and send message to the conluent-topic. Two ways to invoke it using "curl" command in bash: - * 1. curl -d "HTTP BODY" {your host}/api/KafkaInput-Java - * 2. curl "{your host}/api/KafkaInput-Java?message=hello" + * This function listens at endpoint "api/KafkaOutput" and send message to the conluent-topic. Two ways to invoke it using "curl" command in bash: + * 1. curl -d "HTTP BODY" {your host}/api/KafkaOutput + * 2. curl "{your host}/api/KafkaOutput?message=hello" * This sample is for a local cluster. Modify topic and brokerList on the @KafkaOutput annotataion * For the Confluence Cloud example, please refer the KafkaTrigger-Java-Many on the `TriggerFunction.java`. 
*/ - @FunctionName("KafkaInput-Java") - public HttpResponseMessage input( + @FunctionName("KafkaOutput") + public HttpResponseMessage run( @HttpTrigger(name = "req", methods = {HttpMethod.GET, HttpMethod.POST}, authLevel = AuthorizationLevel.ANONYMOUS) HttpRequestMessage> request, @KafkaOutput( name = "kafkaOutput", - topic = "message", + topic = "topic", brokerList="%BrokerList%", username = "%ConfluentCloudUsername%", - password = "%ConfluentCloudPassword%", + password = "ConfluentCloudPassword", authenticationMode = BrokerAuthenticationMode.PLAIN, // sslCaLocation = "confluent_cloud_cacert.pem", // Enable this line for windows. protocol = BrokerProtocol.SASLSSL @@ -35,6 +35,6 @@ public class FunctionOutput { String message = request.getBody().orElse(query); context.getLogger().info("Message:" + message); output.setValue(message); - return request.createResponseBuilder(HttpStatus.OK).body("Message Sent, " + message).build(); + return request.createResponseBuilder(HttpStatus.OK).body("Ok").build(); } } diff --git a/samples/java/confluent/src/main/java/com/contoso/kafka/TriggerFunction.java b/samples/java/confluent/src/main/java/com/contoso/kafka/SampleKafkaTrigger.java similarity index 69% rename from samples/java/confluent/src/main/java/com/contoso/kafka/TriggerFunction.java rename to samples/java/confluent/src/main/java/com/contoso/kafka/SampleKafkaTrigger.java index d87e6ff..73280c9 100644 --- a/samples/java/confluent/src/main/java/com/contoso/kafka/TriggerFunction.java +++ b/samples/java/confluent/src/main/java/com/contoso/kafka/SampleKafkaTrigger.java @@ -7,33 +7,30 @@ import com.microsoft.azure.functions.*; /** * Azure Functions with HTTP Trigger. */ -public class TriggerFunction { +public class SampleKafkaTrigger { /** * This function consume KafkaEvents on the confluent cloud. Create a local.settings.json or configure AppSettings for configring * BrokerList and UserName, and Password. The value wrapped with `%` will be replaced with enviornment variables. 
* For more details, refer https://docs.microsoft.com/en-us/azure/azure-functions/functions-bindings-expressions-patterns#binding-expressions---app-settings - * The function is a sample of consuming kafkaEvent on batch. + * The function is a sample of consuming kafkaEvent. * @param kafkaEventData * @param context */ - @FunctionName("KafkaTrigger-Java-Many") - public void runMany( + @FunctionName("KafkaTrigger") + public void runSingle( @KafkaTrigger( - name = "kafkaTriggerMany", - topic = "message", + name = "KafkaTrigger", + topic = "topic", brokerList="%BrokerList%", consumerGroup="$Default", username = "%ConfluentCloudUsername%", - password = "%ConfluentCloudPassword%", + password = "ConfluentCloudPassword", authenticationMode = BrokerAuthenticationMode.PLAIN, protocol = BrokerProtocol.SASLSSL, // sslCaLocation = "confluent_cloud_cacert.pem", // Enable this line for windows. - cardinality = Cardinality.MANY, dataType = "string" - ) String[] kafkaEventData, + ) String kafkaEventData, final ExecutionContext context) { - for (String message: kafkaEventData) { - context.getLogger().info(message); - } + context.getLogger().info(kafkaEventData); } } diff --git a/samples/java/confluent/src/main/java/com/contoso/kafka/entity/KafkaEntity.java b/samples/java/confluent/src/main/java/com/contoso/kafka/entity/KafkaEntity.java new file mode 100644 index 0000000..a21e7c8 --- /dev/null +++ b/samples/java/confluent/src/main/java/com/contoso/kafka/entity/KafkaEntity.java @@ -0,0 +1,19 @@ +package com.contoso.kafka.entity; + +public class KafkaEntity { + public int Offset; + public int Partition; + public String Timestamp; + public String Topic; + public String Value; + public KafkaHeaders Headers[]; + + public KafkaEntity(int Offset, int Partition, String Topic, String Timestamp, String Value,KafkaHeaders[] headers) { + this.Offset = Offset; + this.Partition = Partition; + this.Topic = Topic; + this.Timestamp = Timestamp; + this.Value = Value; + this.Headers = headers; + } +} \ No 
newline at end of file diff --git a/samples/java/confluent/src/main/java/com/contoso/kafka/entity/KafkaHeaders.java b/samples/java/confluent/src/main/java/com/contoso/kafka/entity/KafkaHeaders.java new file mode 100644 index 0000000..5f1eb03 --- /dev/null +++ b/samples/java/confluent/src/main/java/com/contoso/kafka/entity/KafkaHeaders.java @@ -0,0 +1,12 @@ +package com.contoso.kafka.entity; + +public class KafkaHeaders{ + public String Key; + public String Value; + + public KafkaHeaders(String key, String value) { + this.Key = key; + this.Value = value; + } + +} \ No newline at end of file diff --git a/samples/java/eventhub/.gitignore b/samples/java/eventhub/.gitignore new file mode 100644 index 0000000..c8aa95a --- /dev/null +++ b/samples/java/eventhub/.gitignore @@ -0,0 +1,45 @@ +# Build output +target/ +*.class + +# Log file +*.log + +# BlueJ files +*.ctxt + +# Mobile Tools for Java (J2ME) +.mtj.tmp/ + +# Package Files # +*.jar +*.war +*.ear +*.zip +*.tar.gz +*.rar + +# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml +hs_err_pid* + +# IDE +.idea/ +*.iml + +# macOS +.DS_Store + +# Azure Functions +local.settings.json +bin/ +obj/ + +tmp/ + +# Java +.classpath +.project +.settings + +# Azure Functions Core Tools Temp dir +azure-functions-core-tools \ No newline at end of file diff --git a/samples/java/eventhub/Dockerfile b/samples/java/eventhub/Dockerfile new file mode 100644 index 0000000..97242f2 --- /dev/null +++ b/samples/java/eventhub/Dockerfile @@ -0,0 +1,30 @@ +# based on https://github.com/Azure/azure-functions-docker/blob/master/host/2.0/alpine/amd64/java.Dockerfile + +ARG BASE_IMAGE=mcr.microsoft.com/azure-functions/base:2.0-alpine +FROM ${BASE_IMAGE} as runtime-image + +FROM openjdk:8-jdk-alpine as jdk +RUN mkdir -p /usr/lib/jvm/java-1.8-openjdk + +FROM mcr.microsoft.com/dotnet/core/runtime-deps:2.2-alpine + +RUN apk add --no-cache libc6-compat libnsl && \ + # workaround for https://github.com/grpc/grpc/issues/17255 
+ ln -s /usr/lib/libnsl.so.2 /usr/lib/libnsl.so.1 + +ENV AzureWebJobsScriptRoot=/home/site/wwwroot \ + HOME=/home \ + FUNCTIONS_WORKER_RUNTIME=java + +COPY --from=runtime-image [ "/azure-functions-host", "/azure-functions-host" ] +COPY --from=runtime-image [ "/workers/java", "/azure-functions-host/workers/java" ] + +COPY --from=jdk /usr/lib/jvm/java-1.8-openjdk /usr/lib/jvm/java-1.8-openjdk +ENV JAVA_HOME /usr/lib/jvm/java-1.8-openjdk + +# Kafka extension: Librdkafka +RUN apk update && apk add --no-cache librdkafka-dev +# Kafka extension: Adding files to /home/site/wwwroot +COPY . /home/site/wwwroot + +CMD [ "/azure-functions-host/Microsoft.Azure.WebJobs.Script.WebHost" ] \ No newline at end of file diff --git a/samples/java/eventhub/extensions.csproj b/samples/java/eventhub/extensions.csproj new file mode 100644 index 0000000..4bf4251 --- /dev/null +++ b/samples/java/eventhub/extensions.csproj @@ -0,0 +1,16 @@ + + + netcoreapp3.1 + + ** + + + + + + + diff --git a/samples/java/eventhub/host.json b/samples/java/eventhub/host.json new file mode 100644 index 0000000..d2059a4 --- /dev/null +++ b/samples/java/eventhub/host.json @@ -0,0 +1,3 @@ +{ + "version": "2.0" +} diff --git a/samples/java/eventhub/local.settings.json.example b/samples/java/eventhub/local.settings.json.example new file mode 100644 index 0000000..d75562c --- /dev/null +++ b/samples/java/eventhub/local.settings.json.example @@ -0,0 +1,10 @@ +{ + "IsEncrypted": false, + "Values": { + "AzureWebJobsStorage": "UseDevelopmentStorage=true", + "FUNCTIONS_WORKER_RUNTIME": "java", + "BrokerList": ".servicebus.windows.net:9093", + "EventHubConnectionString": "", + "topic": "{YOUR_KAFKA_TOPIC_NAME}" + } +} \ No newline at end of file diff --git a/samples/java/eventhub/pom.xml b/samples/java/eventhub/pom.xml new file mode 100644 index 0000000..1e57d8f --- /dev/null +++ b/samples/java/eventhub/pom.xml @@ -0,0 +1,193 @@ + + + 4.0.0 + + com.contoso.kafka + kafka-function + 1.0-SNAPSHOT + jar + + Azure Java Functions 
+ + + UTF-8 + 1.8 + 1.8 + 1.18.0 + 2.0.0 + kafka-function-20190419163130420 + westus + ${project.build.directory}/azure-functions/${functionAppName} + java-functions-group + + + + + maven.snapshots + Maven Central Snapshot Repository + https://oss.sonatype.org/content/repositories/snapshots/ + + false + + + true + + + + + + + maven.snapshots + Maven Central Snapshot Repository + https://oss.sonatype.org/content/repositories/snapshots/ + + false + + + true + + + + + + + + com.microsoft.azure.functions + azure-functions-java-library + ${azure.functions.java.library.version} + + + + + + + com.microsoft.azure.functions + azure-functions-java-library + + + com.google.code.gson + gson + 2.9.0 + + + + + + + + com.microsoft.azure + azure-functions-maven-plugin + ${azure.functions.maven.plugin.version} + + + org.apache.maven.plugins + maven-resources-plugin + 3.1.0 + + + org.apache.maven.plugins + maven-dependency-plugin + 3.1.1 + + + + + + + com.microsoft.azure + azure-functions-maven-plugin + + ${functionResourceGroup} + ${functionAppName} + ${functionAppRegion} + windows + EP1 + + + + WEBSITE_RUN_FROM_PACKAGE + 1 + + + FUNCTIONS_EXTENSION_VERSION + ~3 + + + FUNCTIONS_WORKER_RUNTIME + java + + + + + + package-functions + + package + + + + + + org.apache.maven.plugins + maven-resources-plugin + + + copy-resources + package + + copy-resources + + + true + ${stagingDirectory} + + + ${project.basedir} + + host.json + local.settings.json + + + + + + + + + org.apache.maven.plugins + maven-dependency-plugin + + + copy-dependencies + prepare-package + + copy-dependencies + + + ${stagingDirectory}/lib + false + false + true + runtime + azure-functions-java-library + + + + + + + maven-clean-plugin + 3.1.0 + + + + obj + + + + + + + + diff --git a/samples/java/eventhub/pom_linux.xml b/samples/java/eventhub/pom_linux.xml new file mode 100644 index 0000000..08295bb --- /dev/null +++ b/samples/java/eventhub/pom_linux.xml @@ -0,0 +1,210 @@ + + + 4.0.0 + + com.contoso.kafka + 
kafka-function + 1.0-SNAPSHOT + jar + + Azure Java Functions + + + UTF-8 + 1.8 + 1.8 + 1.18.0 + 2.0.0 + kafka-function-20190419163130420 + westus + ${project.build.directory}/azure-functions/${functionAppName} + java-functions-group + + + + + maven.snapshots + Maven Central Snapshot Repository + https://oss.sonatype.org/content/repositories/snapshots/ + + false + + + true + + + + + + + maven.snapshots + Maven Central Snapshot Repository + https://oss.sonatype.org/content/repositories/snapshots/ + + false + + + true + + + + + + + + junit + junit + 4.12 + + + org.mockito + mockito-core + 2.4.0 + + + com.microsoft.azure.functions + azure-functions-java-library + ${azure.functions.java.library.version} + + + + + + + com.microsoft.azure.functions + azure-functions-java-library + + + + + junit + junit + test + + + org.mockito + mockito-core + test + + + + + + + + com.microsoft.azure + azure-functions-maven-plugin + ${azure.functions.maven.plugin.version} + + + org.apache.maven.plugins + maven-resources-plugin + 3.1.0 + + + org.apache.maven.plugins + maven-dependency-plugin + 3.1.1 + + + + + + + com.microsoft.azure + azure-functions-maven-plugin + + ${functionResourceGroup} + ${functionAppName} + ${functionAppRegion} + linux + EP1 + + + + WEBSITE_RUN_FROM_PACKAGE + 1 + + + FUNCTIONS_EXTENSION_VERSION + ~3 + + + FUNCTIONS_WORKER_RUNTIME + java + + + + + + package-functions + + package + + + + + + org.apache.maven.plugins + maven-resources-plugin + + + copy-resources + package + + copy-resources + + + true + ${stagingDirectory} + + + ${project.basedir} + + host.json + local.settings.json + + + + + + + + + org.apache.maven.plugins + maven-dependency-plugin + + + copy-dependencies + prepare-package + + copy-dependencies + + + ${stagingDirectory}/lib + false + false + true + runtime + azure-functions-java-library + + + + + + + maven-clean-plugin + 3.1.0 + + + + obj + + + + + + + + diff --git a/samples/java/eventhub/pom_windows.xml b/samples/java/eventhub/pom_windows.xml new 
file mode 100644 index 0000000..e874b9d --- /dev/null +++ b/samples/java/eventhub/pom_windows.xml @@ -0,0 +1,210 @@ + + + 4.0.0 + + com.contoso.kafka + kafka-function + 1.0-SNAPSHOT + jar + + Azure Java Functions + + + UTF-8 + 1.8 + 1.8 + 1.18.0 + 2.0.0 + kafka-function-20190419163130420 + westus + ${project.build.directory}/azure-functions/${functionAppName} + java-functions-group + + + + + maven.snapshots + Maven Central Snapshot Repository + https://oss.sonatype.org/content/repositories/snapshots/ + + false + + + true + + + + + + + maven.snapshots + Maven Central Snapshot Repository + https://oss.sonatype.org/content/repositories/snapshots/ + + false + + + true + + + + + + + + junit + junit + 4.12 + + + org.mockito + mockito-core + 2.4.0 + + + com.microsoft.azure.functions + azure-functions-java-library + ${azure.functions.java.library.version} + + + + + + + com.microsoft.azure.functions + azure-functions-java-library + + + + + junit + junit + test + + + org.mockito + mockito-core + test + + + + + + + + com.microsoft.azure + azure-functions-maven-plugin + ${azure.functions.maven.plugin.version} + + + org.apache.maven.plugins + maven-resources-plugin + 3.1.0 + + + org.apache.maven.plugins + maven-dependency-plugin + 3.1.1 + + + + + + + com.microsoft.azure + azure-functions-maven-plugin + + ${functionResourceGroup} + ${functionAppName} + ${functionAppRegion} + windows + EP1 + + + + WEBSITE_RUN_FROM_PACKAGE + 1 + + + FUNCTIONS_EXTENSION_VERSION + ~3 + + + FUNCTIONS_WORKER_RUNTIME + java + + + + + + package-functions + + package + + + + + + org.apache.maven.plugins + maven-resources-plugin + + + copy-resources + package + + copy-resources + + + true + ${stagingDirectory} + + + ${project.basedir} + + host.json + local.settings.json + + + + + + + + + org.apache.maven.plugins + maven-dependency-plugin + + + copy-dependencies + prepare-package + + copy-dependencies + + + ${stagingDirectory}/lib + false + false + true + runtime + azure-functions-java-library + + + + + + 
+ maven-clean-plugin + 3.1.0 + + + + obj + + + + + + + + diff --git a/samples/java/eventhub/src/main/java/com/contoso/kafka/KafkaOutputMany.java b/samples/java/eventhub/src/main/java/com/contoso/kafka/KafkaOutputMany.java new file mode 100644 index 0000000..bcae923 --- /dev/null +++ b/samples/java/eventhub/src/main/java/com/contoso/kafka/KafkaOutputMany.java @@ -0,0 +1,31 @@ +package com.contoso.kafka; + +import java.util.*; +import com.microsoft.azure.functions.annotation.*; +import com.microsoft.azure.functions.*; + +import java.util.Optional; + +public class KafkaOutputMany { + @FunctionName("KafkaOutputMany") + public HttpResponseMessage run( + @HttpTrigger(name = "req", methods = {HttpMethod.GET, HttpMethod.POST}, authLevel = AuthorizationLevel.ANONYMOUS) HttpRequestMessage> request, + @KafkaOutput( + name = "kafkaOutput", + topic = "topic", + brokerList="%BrokerList%", + username = "$ConnectionString", + password = "EventHubConnectionString", + authenticationMode = BrokerAuthenticationMode.PLAIN, + // sslCaLocation = "confluent_cloud_cacert.pem", // Enable this line for windows. 
+ protocol = BrokerProtocol.SASLSSL + ) OutputBinding output, + final ExecutionContext context) { + context.getLogger().info("Java HTTP trigger processed a request."); + String[] messages = new String[2]; + messages[0] = "one"; + messages[1] = "two"; + output.setValue(messages); + return request.createResponseBuilder(HttpStatus.OK).body("Ok").build(); + } +} diff --git a/samples/java/eventhub/src/main/java/com/contoso/kafka/KafkaOutputManyWithHeaders.java b/samples/java/eventhub/src/main/java/com/contoso/kafka/KafkaOutputManyWithHeaders.java new file mode 100644 index 0000000..afe26fe --- /dev/null +++ b/samples/java/eventhub/src/main/java/com/contoso/kafka/KafkaOutputManyWithHeaders.java @@ -0,0 +1,38 @@ +package com.contoso.kafka; + +import com.microsoft.azure.functions.annotation.*; +import com.microsoft.azure.functions.*; + +import java.util.Optional; + +public class KafkaOutputManyWithHeaders { + @FunctionName("KafkaOutputManyWithHeaders") + public HttpResponseMessage run( + @HttpTrigger(name = "req", methods = {HttpMethod.GET, HttpMethod.POST}, authLevel = AuthorizationLevel.ANONYMOUS) HttpRequestMessage> request, + @KafkaOutput( + name = "kafkaOutput", + topic = "topic", + brokerList="%BrokerList%", + username = "$ConnectionString", + password = "EventHubConnectionString", + authenticationMode = BrokerAuthenticationMode.PLAIN, + // sslCaLocation = "confluent_cloud_cacert.pem", // Enable this line for windows. 
+ protocol = BrokerProtocol.SASLSSL + ) OutputBinding output, + final ExecutionContext context) { + context.getLogger().info("Java HTTP trigger processed a request."); + KafkaEntity[] kevents = new KafkaEntity[2]; + KafkaHeaders[] headersForEvent1 = new KafkaHeaders[1]; + headersForEvent1[0] = new KafkaHeaders("test", "java"); + KafkaEntity kevent1 = new KafkaEntity(364, 0, "topic", "2022-04-09T03:20:06.591Z", "one", headersForEvent1); + + KafkaHeaders[] headersForEvent2 = new KafkaHeaders[1]; + headersForEvent2[0] = new KafkaHeaders("test1", "java"); + KafkaEntity kevent2 = new KafkaEntity(364, 0, "topic", "2022-04-09T03:20:06.591Z", "two", headersForEvent2); + + kevents[0] = kevent1; + kevents[1] = kevent2; + output.setValue(kevents); + return request.createResponseBuilder(HttpStatus.OK).body("Ok").build(); + } +} diff --git a/samples/java/eventhub/src/main/java/com/contoso/kafka/KafkaOutputWithHeaders.java b/samples/java/eventhub/src/main/java/com/contoso/kafka/KafkaOutputWithHeaders.java new file mode 100644 index 0000000..dd7ea56 --- /dev/null +++ b/samples/java/eventhub/src/main/java/com/contoso/kafka/KafkaOutputWithHeaders.java @@ -0,0 +1,34 @@ +package com.contoso.kafka; + +import com.microsoft.azure.functions.annotation.*; +import com.microsoft.azure.functions.*; + +import java.util.Optional; + +public class KafkaOutputWithHeaders { + @FunctionName("KafkaOutputWithHeaders") + public HttpResponseMessage run( + @HttpTrigger(name = "req", methods = {HttpMethod.GET, HttpMethod.POST}, authLevel = AuthorizationLevel.ANONYMOUS) HttpRequestMessage> request, + @KafkaOutput( + name = "kafkaOutput", + topic = "topic", + brokerList="%BrokerList%", + username= "$ConnectionString", + password = "EventHubConnectionString", + authenticationMode = BrokerAuthenticationMode.PLAIN, + // sslCaLocation = "confluent_cloud_cacert.pem", // Enable this line for windows. 
+ protocol = BrokerProtocol.SASLSSL + ) OutputBinding output, + final ExecutionContext context) { + context.getLogger().info("Java HTTP trigger processed a request."); + + // Parse query parameter + String query = request.getQueryParameters().get("message"); + String message = request.getBody().orElse(query); + KafkaHeaders[] headers = new KafkaHeaders[1]; + headers[0] = new KafkaHeaders("test", "java"); + KafkaEntity kevent = new KafkaEntity(364, 0, "topic", "2022-04-09T03:20:06.591Z", message, headers); + output.setValue(kevent); + return request.createResponseBuilder(HttpStatus.OK).body("Ok").build(); + } +} diff --git a/samples/java/eventhub/src/main/java/com/contoso/kafka/KafkaTriggerMany.java b/samples/java/eventhub/src/main/java/com/contoso/kafka/KafkaTriggerMany.java new file mode 100644 index 0000000..2e450ef --- /dev/null +++ b/samples/java/eventhub/src/main/java/com/contoso/kafka/KafkaTriggerMany.java @@ -0,0 +1,28 @@ +package com.contoso.kafka; + +import java.util.*; +import com.microsoft.azure.functions.annotation.*; +import com.microsoft.azure.functions.*; + +public class KafkaTriggerMany { + @FunctionName("KafkaTriggerMany") + public void runMany( + @KafkaTrigger( + name = "kafkaTriggerMany", + topic = "topic", + brokerList="%BrokerList%", + consumerGroup="$Default", + username = "$ConnectionString", + password = "EventHubConnectionString", + authenticationMode = BrokerAuthenticationMode.PLAIN, + protocol = BrokerProtocol.SASLSSL, + // sslCaLocation = "confluent_cloud_cacert.pem", // Enable this line for windows. 
+ cardinality = Cardinality.MANY, + dataType = "string" + ) String[] kafkaEvents, + final ExecutionContext context) { + for (String kevent: kafkaEvents) { + context.getLogger().info(kevent); + } + } +} diff --git a/samples/java/eventhub/src/main/java/com/contoso/kafka/KafkaTriggerManyWithHeaders.java b/samples/java/eventhub/src/main/java/com/contoso/kafka/KafkaTriggerManyWithHeaders.java new file mode 100644 index 0000000..8522283 --- /dev/null +++ b/samples/java/eventhub/src/main/java/com/contoso/kafka/KafkaTriggerManyWithHeaders.java @@ -0,0 +1,37 @@ +package com.contoso.kafka; + +import java.util.*; +import com.microsoft.azure.functions.annotation.*; + +import com.google.gson.Gson; +import com.microsoft.azure.functions.*; + +public class KafkaTriggerManyWithHeaders { + @FunctionName("KafkaTriggerManyWithHeaders") + public void runSingle( + @KafkaTrigger( + name = "KafkaTrigger", + topic = "topic", + brokerList="%BrokerList%", + consumerGroup="$Default", + username = "$ConnectionString", + password = "EventHubConnectionString", + authenticationMode = BrokerAuthenticationMode.PLAIN, + protocol = BrokerProtocol.SASLSSL, + // sslCaLocation = "confluent_cloud_cacert.pem", // Enable this line for windows. 
+ dataType = "string", + cardinality = Cardinality.MANY + ) List kafkaEvents, + final ExecutionContext context) { + Gson gson = new Gson(); + for (String keventstr: kafkaEvents) { + KafkaEntity kevent = gson.fromJson(keventstr, KafkaEntity.class); + context.getLogger().info("Java Kafka trigger function called for message: " + kevent.Value); + context.getLogger().info("Headers for the message:"); + for (KafkaHeaders header : kevent.Headers) { + String decodedValue = new String(Base64.getDecoder().decode(header.Value)); + context.getLogger().info("Key:" + header.Key + " Value:" + decodedValue); + } + } + } +} diff --git a/samples/java/eventhub/src/main/java/com/contoso/kafka/KafkaTriggerWithHeaders.java b/samples/java/eventhub/src/main/java/com/contoso/kafka/KafkaTriggerWithHeaders.java new file mode 100644 index 0000000..0b53056 --- /dev/null +++ b/samples/java/eventhub/src/main/java/com/contoso/kafka/KafkaTriggerWithHeaders.java @@ -0,0 +1,30 @@ +package com.contoso.kafka; + +import java.util.*; +import com.microsoft.azure.functions.annotation.*; +import com.microsoft.azure.functions.*; + +public class KafkaTriggerWithHeaders { + @FunctionName("KafkaTriggerWithHeaders") + public void runSingle( + @KafkaTrigger( + name = "KafkaTrigger", + topic = "topic", + brokerList="%BrokerList%", + consumerGroup="$Default", + username= "$ConnectionString", + password = "EventHubConnectionString", + authenticationMode = BrokerAuthenticationMode.PLAIN, + protocol = BrokerProtocol.SASLSSL, + // sslCaLocation = "confluent_cloud_cacert.pem", // Enable this line for windows. 
+ dataType = "string" + ) KafkaEntity kafkaEventData, + final ExecutionContext context) { + context.getLogger().info("Java Kafka trigger function called for message: " + kafkaEventData.Value); + context.getLogger().info("Headers for the message:"); + for (KafkaHeaders header : kafkaEventData.Headers) { + String decodedValue = new String(Base64.getDecoder().decode(header.Value)); + context.getLogger().info("Key:" + header.Key + " Value:" + decodedValue); + } + } +} diff --git a/samples/java/eventhub/src/main/java/com/contoso/kafka/SampleKafkaOutput.java b/samples/java/eventhub/src/main/java/com/contoso/kafka/SampleKafkaOutput.java new file mode 100644 index 0000000..3c8a821 --- /dev/null +++ b/samples/java/eventhub/src/main/java/com/contoso/kafka/SampleKafkaOutput.java @@ -0,0 +1,38 @@ +package com.contoso.kafka; + +import java.util.*; +import com.microsoft.azure.functions.annotation.*; +import com.microsoft.azure.functions.*; + +import java.util.Optional; + +public class SampleKafkaOutput { + /** + * This function listens at endpoint "api/KafkaOutput" and send kafka message to the eventhub. Two ways to invoke it using "curl" command in bash: + * 1. curl -d "HTTP BODY" {your host}/api/KafkaOutput + * 2. curl "{your host}/api/KafkaOutput?message=hello" + */ + @FunctionName("KafkaOutput") + public HttpResponseMessage run( + @HttpTrigger(name = "req", methods = {HttpMethod.GET, HttpMethod.POST}, authLevel = AuthorizationLevel.ANONYMOUS) HttpRequestMessage> request, + @KafkaOutput( + name = "kafkaOutput", + topic = "topic", + brokerList="%BrokerList%", + username = "$ConnectionString", + password = "EventHubConnectionString", + authenticationMode = BrokerAuthenticationMode.PLAIN, + // sslCaLocation = "confluent_cloud_cacert.pem", // Enable this line for windows. 
+ protocol = BrokerProtocol.SASLSSL + ) OutputBinding<String> output, + final ExecutionContext context) { + context.getLogger().info("Java HTTP trigger processed a request."); + + // Parse query parameter + String query = request.getQueryParameters().get("message"); + String message = request.getBody().orElse(query); + context.getLogger().info("Message:" + message); + output.setValue(message); + return request.createResponseBuilder(HttpStatus.OK).body("Ok").build(); + } +} diff --git a/samples/java/eventhub/src/main/java/com/contoso/kafka/SampleKafkaTrigger.java b/samples/java/eventhub/src/main/java/com/contoso/kafka/SampleKafkaTrigger.java new file mode 100644 index 0000000..5679ca2 --- /dev/null +++ b/samples/java/eventhub/src/main/java/com/contoso/kafka/SampleKafkaTrigger.java @@ -0,0 +1,36 @@ +package com.contoso.kafka; + +import java.util.*; +import com.microsoft.azure.functions.annotation.*; +import com.microsoft.azure.functions.*; + +/** + * Azure Functions with HTTP Trigger. + */ +public class SampleKafkaTrigger { + /** + * This function consumes KafkaEvents from the Azure Event Hub. Create a local.settings.json or configure AppSettings for configuring + * BrokerList and UserName, and Password. The value wrapped with `%` will be replaced with environment variables. + * For more details, refer https://docs.microsoft.com/en-us/azure/azure-functions/functions-bindings-expressions-patterns#binding-expressions---app-settings + * The function is a sample of consuming kafkaEvent. + * @param kafkaEventData + * @param context + */ + @FunctionName("KafkaTrigger") + public void runSingle( + @KafkaTrigger( + name = "KafkaTrigger", + topic = "topic", + brokerList="%BrokerList%", + consumerGroup="$Default", + username = "$ConnectionString", + password = "EventHubConnectionString", + authenticationMode = BrokerAuthenticationMode.PLAIN, + protocol = BrokerProtocol.SASLSSL, + // sslCaLocation = "confluent_cloud_cacert.pem", // Enable this line for windows. 
+ dataType = "string" + ) String kafkaEventData, + final ExecutionContext context) { + context.getLogger().info(kafkaEventData); + } +} diff --git a/samples/java/eventhub/src/main/java/com/contoso/kafka/entity/KafkaEntity.java b/samples/java/eventhub/src/main/java/com/contoso/kafka/entity/KafkaEntity.java new file mode 100644 index 0000000..b4ee562 --- /dev/null +++ b/samples/java/eventhub/src/main/java/com/contoso/kafka/entity/KafkaEntity.java @@ -0,0 +1,19 @@ +package com.contoso.kafka; + +public class KafkaEntity { + int Offset; + int Partition; + String Timestamp; + String Topic; + String Value; + KafkaHeaders Headers[]; + + public KafkaEntity(int Offset, int Partition, String Topic, String Timestamp, String Value,KafkaHeaders[] headers) { + this.Offset = Offset; + this.Partition = Partition; + this.Topic = Topic; + this.Timestamp = Timestamp; + this.Value = Value; + this.Headers = headers; + } +} \ No newline at end of file diff --git a/samples/java/eventhub/src/main/java/com/contoso/kafka/entity/KafkaHeaders.java b/samples/java/eventhub/src/main/java/com/contoso/kafka/entity/KafkaHeaders.java new file mode 100644 index 0000000..b096757 --- /dev/null +++ b/samples/java/eventhub/src/main/java/com/contoso/kafka/entity/KafkaHeaders.java @@ -0,0 +1,12 @@ +package com.contoso.kafka; + +public class KafkaHeaders{ + String Key; + String Value; + + public KafkaHeaders(String key, String value) { + this.Key = key; + this.Value = value; + } + +} \ No newline at end of file diff --git a/samples/java/eventhub/src/repo/com/microsoft/azure/functions/azure-functions-java-library-kafka/1.0.0/azure-functions-java-library-kafka-1.0.0.pom b/samples/java/eventhub/src/repo/com/microsoft/azure/functions/azure-functions-java-library-kafka/1.0.0/azure-functions-java-library-kafka-1.0.0.pom new file mode 100644 index 0000000..d153441 --- /dev/null +++ 
b/samples/java/eventhub/src/repo/com/microsoft/azure/functions/azure-functions-java-library-kafka/1.0.0/azure-functions-java-library-kafka-1.0.0.pom @@ -0,0 +1,9 @@ + + + 4.0.0 + com.microsoft.azure.functions + azure-functions-java-library-kafka + 1.0.0 + POM was created from install:install-file + diff --git a/samples/java/eventhub/src/repo/com/microsoft/azure/functions/azure-functions-java-library-kafka/maven-metadata-local.xml b/samples/java/eventhub/src/repo/com/microsoft/azure/functions/azure-functions-java-library-kafka/maven-metadata-local.xml new file mode 100644 index 0000000..a2d2898 --- /dev/null +++ b/samples/java/eventhub/src/repo/com/microsoft/azure/functions/azure-functions-java-library-kafka/maven-metadata-local.xml @@ -0,0 +1,12 @@ + + + com.microsoft.azure.functions + azure-functions-java-library-kafka + + 1.0.0 + + 1.0.0 + + 20200708041508 + + diff --git a/samples/javascript/KafkaOutput/function.confluent.json b/samples/javascript/KafkaOutput/function.confluent.json new file mode 100644 index 0000000..b013a6a --- /dev/null +++ b/samples/javascript/KafkaOutput/function.confluent.json @@ -0,0 +1,29 @@ +{ + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "req", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "name": "outputKafkaMessage", + "brokerList": "BrokerList", + "topic": "topic", + "username": "ConfluentCloudUsername", + "password": "ConfluentCloudPassword", + "protocol": "SASLSSL", + "authenticationMode": "PLAIN", + "direction": "out" + }, + { + "type": "http", + "direction": "out", + "name": "res" + } + ] +} \ No newline at end of file diff --git a/samples/javascript/KafkaOutput/function.eventhub.json b/samples/javascript/KafkaOutput/function.eventhub.json new file mode 100644 index 0000000..fab0e32 --- /dev/null +++ b/samples/javascript/KafkaOutput/function.eventhub.json @@ -0,0 +1,29 @@ +{ + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": 
"in", + "name": "req", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "name": "outputKafkaMessage", + "brokerList": "BrokerList", + "topic": "topic", + "username": "$ConnectionString", + "password": "EventHubConnectionString", + "protocol": "SASLSSL", + "authenticationMode": "PLAIN", + "direction": "out" + }, + { + "type": "http", + "direction": "out", + "name": "res" + } + ] +} \ No newline at end of file diff --git a/samples/javascript/KafkaOutput/index.js b/samples/javascript/KafkaOutput/index.js new file mode 100644 index 0000000..f1b9779 --- /dev/null +++ b/samples/javascript/KafkaOutput/index.js @@ -0,0 +1,12 @@ +// This sample will create topic "topic" and send message to it. +// KafkaTrigger will be trigged. +module.exports = async function (context, req) { + context.log('JavaScript HTTP trigger function processed a request.'); + + const message = (req.query.message); + context.bindings.outputKafkaMessage = message; + context.res = { + // status: 200, /* Defaults to 200 */ + body: 'Ok' + }; +} \ No newline at end of file diff --git a/samples/javascript/KafkaOutputMany/function.confluent.json b/samples/javascript/KafkaOutputMany/function.confluent.json new file mode 100644 index 0000000..eaf01d9 --- /dev/null +++ b/samples/javascript/KafkaOutputMany/function.confluent.json @@ -0,0 +1,29 @@ +{ + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "req", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "name": "outputKafkaMessages", + "brokerList": "BrokerList", + "topic": "topic", + "username": "ConfluentCloudUsername", + "password": "ConfluentCloudPassword", + "protocol": "SASLSSL", + "authenticationMode": "PLAIN", + "direction": "out" + }, + { + "type": "http", + "direction": "out", + "name": "res" + } + ] +} \ No newline at end of file diff --git a/samples/javascript/KafkaOutputMany/function.eventhub.json b/samples/javascript/KafkaOutputMany/function.eventhub.json new file mode 100644 index 
0000000..c0444c3 --- /dev/null +++ b/samples/javascript/KafkaOutputMany/function.eventhub.json @@ -0,0 +1,29 @@ +{ + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "req", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "name": "outputKafkaMessages", + "brokerList": "BrokerList", + "topic": "topic", + "username": "$ConnectionString", + "password": "EventHubConnectionString", + "protocol": "SASLSSL", + "authenticationMode": "PLAIN", + "direction": "out" + }, + { + "type": "http", + "direction": "out", + "name": "res" + } + ] +} \ No newline at end of file diff --git a/samples/javascript/KafkaOutputMany/index.js b/samples/javascript/KafkaOutputMany/index.js new file mode 100644 index 0000000..5aeebf0 --- /dev/null +++ b/samples/javascript/KafkaOutputMany/index.js @@ -0,0 +1,11 @@ +// This sample will create topic "topic" and send message to it. +// KafkaTrigger will be trigged. +module.exports = async function (context, req) { + context.log('JavaScript HTTP trigger function processed a request.'); + + context.bindings.outputKafkaMessages = ["one", "two"]; + context.res = { + // status: 200, /* Defaults to 200 */ + body: 'Ok' + }; +} \ No newline at end of file diff --git a/samples/javascript/KafkaOutputManyWithHeaders/function.confluent.json b/samples/javascript/KafkaOutputManyWithHeaders/function.confluent.json new file mode 100644 index 0000000..df840fc --- /dev/null +++ b/samples/javascript/KafkaOutputManyWithHeaders/function.confluent.json @@ -0,0 +1,30 @@ +{ + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "req", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "name": "outputKafkaMessage", + "brokerList": "BrokerList", + "topic": "topic", + "username": "ConfluentCloudUsername", + "password": "ConfluentCloudPassword", + "protocol": "SASLSSL", + "authenticationMode": "PLAIN", + "direction": "out", + "dataType": "string" + }, + { + 
"type": "http", + "direction": "out", + "name": "res" + } + ] +} \ No newline at end of file diff --git a/samples/javascript/KafkaOutputManyWithHeaders/function.eventhub.json b/samples/javascript/KafkaOutputManyWithHeaders/function.eventhub.json new file mode 100644 index 0000000..6cff6bd --- /dev/null +++ b/samples/javascript/KafkaOutputManyWithHeaders/function.eventhub.json @@ -0,0 +1,30 @@ +{ + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "req", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "name": "outputKafkaMessage", + "brokerList": "BrokerList", + "topic": "topic", + "username": "$ConnectionString", + "password": "EventHubConnectionString", + "protocol": "SASLSSL", + "authenticationMode": "PLAIN", + "direction": "out", + "dataType": "string" + }, + { + "type": "http", + "direction": "out", + "name": "res" + } + ] +} \ No newline at end of file diff --git a/samples/javascript/KafkaOutputManyWithHeaders/index.js b/samples/javascript/KafkaOutputManyWithHeaders/index.js new file mode 100644 index 0000000..d952186 --- /dev/null +++ b/samples/javascript/KafkaOutputManyWithHeaders/index.js @@ -0,0 +1,14 @@ +// This sample will create topic "topic" and send message to it. +// KafkaTrigger will be trigged. 
+module.exports = async function (context, req) { + context.log('JavaScript HTTP trigger function processed a request.'); + + const message = (req.query.message || (req.body && req.body.message)); + const responseMessage = 'Ok' + context.bindings.outputKafkaMessage = ["{ \"Offset\":364,\"Partition\":0,\"Topic\":\"kafkaeventhubtest1\",\"Timestamp\":\"2022-04-09T03:20:06.591Z\", \"Value\": \"one\", \"Headers\": [{ \"Key\": \"test\", \"Value\": \"javascript\" }] }", + "{ \"Offset\":364,\"Partition\":0,\"Topic\":\"kafkaeventhubtest1\",\"Timestamp\":\"2022-04-09T03:20:06.591Z\", \"Value\": \"two\", \"Headers\": [{ \"Key\": \"test\", \"Value\": \"javascript\" }] }"] + context.res = { + // status: 200, /* Defaults to 200 */ + body: responseMessage + }; +} \ No newline at end of file diff --git a/samples/javascript/KafkaOutputWithHeader/function.confluent.json b/samples/javascript/KafkaOutputWithHeader/function.confluent.json new file mode 100644 index 0000000..df840fc --- /dev/null +++ b/samples/javascript/KafkaOutputWithHeader/function.confluent.json @@ -0,0 +1,30 @@ +{ + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "req", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "name": "outputKafkaMessage", + "brokerList": "BrokerList", + "topic": "topic", + "username": "ConfluentCloudUsername", + "password": "ConfluentCloudPassword", + "protocol": "SASLSSL", + "authenticationMode": "PLAIN", + "direction": "out", + "dataType": "string" + }, + { + "type": "http", + "direction": "out", + "name": "res" + } + ] +} \ No newline at end of file diff --git a/samples/javascript/KafkaOutputWithHeader/function.eventhub.json b/samples/javascript/KafkaOutputWithHeader/function.eventhub.json new file mode 100644 index 0000000..6cff6bd --- /dev/null +++ b/samples/javascript/KafkaOutputWithHeader/function.eventhub.json @@ -0,0 +1,30 @@ +{ + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", 
+ "name": "req", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "name": "outputKafkaMessage", + "brokerList": "BrokerList", + "topic": "topic", + "username": "$ConnectionString", + "password": "EventHubConnectionString", + "protocol": "SASLSSL", + "authenticationMode": "PLAIN", + "direction": "out", + "dataType": "string" + }, + { + "type": "http", + "direction": "out", + "name": "res" + } + ] +} \ No newline at end of file diff --git a/samples/javascript/KafkaOutputWithHeader/index.js b/samples/javascript/KafkaOutputWithHeader/index.js new file mode 100644 index 0000000..12af285 --- /dev/null +++ b/samples/javascript/KafkaOutputWithHeader/index.js @@ -0,0 +1,15 @@ +// This sample will create topic "topic" and send message to it. +// KafkaTrigger will be trigged. +module.exports = async function (context, req) { + context.log('JavaScript HTTP trigger function processed a request.'); + + const message = (req.query.message || (req.body && req.body.message)); + const responseMessage = message + ? "Message received: " + message + ". The message transfered to the kafka broker." + : "This HTTP triggered function executed successfully. 
Pass a message in the query string or in the request body for a personalized response."; + context.bindings.outputKafkaMessage = "{ \"Offset\":364,\"Partition\":0,\"Topic\":\"kafkaeventhubtest1\",\"Timestamp\":\"2022-04-09T03:20:06.591Z\", \"Value\": \"" + message + "\", \"Headers\": [{ \"Key\": \"test\", \"Value\": \"javascript\" }] }" + context.res = { + // status: 200, /* Defaults to 200 */ + body: responseMessage + }; +} \ No newline at end of file diff --git a/samples/javascript/UsersTrigger/function.json b/samples/javascript/KafkaTrigger/function.confluent.json similarity index 77% rename from samples/javascript/UsersTrigger/function.json rename to samples/javascript/KafkaTrigger/function.confluent.json index 0fd64f3..69d346f 100644 --- a/samples/javascript/UsersTrigger/function.json +++ b/samples/javascript/KafkaTrigger/function.confluent.json @@ -4,14 +4,14 @@ "type": "kafkaTrigger", "name": "event", "direction": "in", - "topic": "users", + "topic": "topic", "brokerList": "%BrokerList%", "username": "%ConfluentCloudUserName%", "password": "%ConfluentCloudPassword%", "protocol": "saslSsl", "authenticationMode": "plain", - "consumerGroup" : "functions", - "dataType": "binary" + "consumerGroup" : "$Default", + "dataType": "string" } ] } \ No newline at end of file diff --git a/samples/javascript/KafkaTrigger/function.eventhub.json b/samples/javascript/KafkaTrigger/function.eventhub.json new file mode 100644 index 0000000..5aeb76d --- /dev/null +++ b/samples/javascript/KafkaTrigger/function.eventhub.json @@ -0,0 +1,17 @@ +{ + "bindings": [ + { + "type": "kafkaTrigger", + "name": "event", + "direction": "in", + "topic": "topic", + "brokerList": "%BrokerList%", + "username": "$ConnectionString", + "password": "EventHubConnectionString", + "protocol": "saslSsl", + "authenticationMode": "plain", + "consumerGroup" : "$Default", + "dataType": "string" + } + ] +} \ No newline at end of file diff --git a/samples/javascript/KafkaTrigger/index.js 
b/samples/javascript/KafkaTrigger/index.js new file mode 100644 index 0000000..75fd8fc --- /dev/null +++ b/samples/javascript/KafkaTrigger/index.js @@ -0,0 +1,4 @@ +module.exports = async function (context, event) { + // context.log.info(event) + context.log.info(`JavaScript Kafka trigger function called for message ${event.Value}`); +}; \ No newline at end of file diff --git a/samples/javascript/UsersTriggerMany/function.json_ b/samples/javascript/KafkaTriggerMany/function.confluent.json similarity index 93% rename from samples/javascript/UsersTriggerMany/function.json_ rename to samples/javascript/KafkaTriggerMany/function.confluent.json index 88f83da..4ff947f 100644 --- a/samples/javascript/UsersTriggerMany/function.json_ +++ b/samples/javascript/KafkaTriggerMany/function.confluent.json @@ -7,7 +7,7 @@ "protocol" : "SASLSSL", "password" : "%ConfluentCloudPassword%", "dataType" : "string", - "topic" : "message", + "topic" : "topic", "authenticationMode" : "PLAIN", "cardinality" : "MANY", "consumerGroup" : "$Default", diff --git a/samples/javascript/KafkaTriggerMany/function.eventhub.json b/samples/javascript/KafkaTriggerMany/function.eventhub.json new file mode 100644 index 0000000..5cc388c --- /dev/null +++ b/samples/javascript/KafkaTriggerMany/function.eventhub.json @@ -0,0 +1,18 @@ +{ + "bindings": [ + { + "type": "kafkaTrigger", + "name": "events", + "direction": "in", + "protocol" : "SASLSSL", + "dataType" : "string", + "topic" : "topic", + "authenticationMode" : "PLAIN", + "cardinality" : "MANY", + "consumerGroup" : "$Default", + "username": "$ConnectionString", + "password": "EventHubConnectionString", + "brokerList" : "%BrokerList%" + } + ] +} \ No newline at end of file diff --git a/samples/javascript/KafkaTriggerMany/index.js b/samples/javascript/KafkaTriggerMany/index.js new file mode 100644 index 0000000..e2ef1e2 --- /dev/null +++ b/samples/javascript/KafkaTriggerMany/index.js @@ -0,0 +1,7 @@ +module.exports = async function (context, events) { + 
function print(event) { + var eventJson = JSON.parse(event) + context.log.info(`JavaScript Kafka trigger function called for message ${eventJson.Value}`); + } + events.map(print); +}; \ No newline at end of file diff --git a/samples/javascript/KafkaTriggerManyWithHeaders/function.confluent.json b/samples/javascript/KafkaTriggerManyWithHeaders/function.confluent.json new file mode 100644 index 0000000..b29b29c --- /dev/null +++ b/samples/javascript/KafkaTriggerManyWithHeaders/function.confluent.json @@ -0,0 +1,18 @@ +{ + "bindings": [ + { + "type": "kafkaTrigger", + "name": "event", + "direction": "in", + "brokerList": "BrokerList", + "topic": "topic", + "username": "%ConfluentCloudUserName%", + "password": "%ConfluentCloudPassword%", + "protocol": "saslSsl", + "authenticationMode": "plain", + "consumerGroup": "$Default", + "dataType": "string", + "cardinality" : "MANY" + } + ] +} \ No newline at end of file diff --git a/samples/javascript/KafkaTriggerManyWithHeaders/function.eventhub.json b/samples/javascript/KafkaTriggerManyWithHeaders/function.eventhub.json new file mode 100644 index 0000000..61eeac6 --- /dev/null +++ b/samples/javascript/KafkaTriggerManyWithHeaders/function.eventhub.json @@ -0,0 +1,18 @@ +{ + "bindings": [ + { + "type": "kafkaTrigger", + "name": "event", + "direction": "in", + "brokerList": "BrokerList", + "topic": "topic", + "username": "$ConnectionString", + "password": "EventHubConnectionString", + "protocol": "saslSsl", + "authenticationMode": "plain", + "consumerGroup": "$Default", + "dataType": "string", + "cardinality" : "MANY" + } + ] +} \ No newline at end of file diff --git a/samples/javascript/KafkaTriggerManyWithHeaders/index.js b/samples/javascript/KafkaTriggerManyWithHeaders/index.js new file mode 100644 index 0000000..6a3183f --- /dev/null +++ b/samples/javascript/KafkaTriggerManyWithHeaders/index.js @@ -0,0 +1,12 @@ +module.exports = async function (context, event) { + function print(kevent) { + var keventJson = 
JSON.parse(kevent) + context.log.info(`JavaScript Kafka trigger function called for message ${keventJson.Value}`); + context.log.info(`Headers for this message:`) + let headers = keventJson.Headers; + headers.forEach(element => { + context.log.info(`Key: ${element.Key} Value:${Buffer.from(element.Value, 'base64')}`) + }); + } + event.map(print); +}; \ No newline at end of file diff --git a/samples/javascript/KafkaTriggerWithHeaders/function.confluent.json b/samples/javascript/KafkaTriggerWithHeaders/function.confluent.json new file mode 100644 index 0000000..b9582b8 --- /dev/null +++ b/samples/javascript/KafkaTriggerWithHeaders/function.confluent.json @@ -0,0 +1,16 @@ +{ + "bindings": [ + { + "type": "kafkaTrigger", + "name": "event", + "direction": "in", + "brokerList": "BrokerList", + "username": "%ConfluentCloudUserName%", + "password": "%ConfluentCloudPassword%", + "protocol": "saslSsl", + "topic": "topic", + "consumerGroup": "$Default", + "dataType": "string" + } + ] +} \ No newline at end of file diff --git a/samples/javascript/KafkaTriggerWithHeaders/function.eventhub.json b/samples/javascript/KafkaTriggerWithHeaders/function.eventhub.json new file mode 100644 index 0000000..3baf04a --- /dev/null +++ b/samples/javascript/KafkaTriggerWithHeaders/function.eventhub.json @@ -0,0 +1,17 @@ +{ + "bindings": [ + { + "type": "kafkaTrigger", + "name": "event", + "direction": "in", + "brokerList": "BrokerList", + "username": "$ConnectionString", + "password": "EventHubConnectionString", + "protocol": "saslSsl", + "topic": "topic", + "consumerGroup": "$Default", + "dataType": "string", + "authenticationMode": "plain" + } + ] +} \ No newline at end of file diff --git a/samples/javascript/KafkaTriggerWithHeaders/index.js b/samples/javascript/KafkaTriggerWithHeaders/index.js new file mode 100644 index 0000000..806e3c7 --- /dev/null +++ b/samples/javascript/KafkaTriggerWithHeaders/index.js @@ -0,0 +1,4 @@ +module.exports = async function (context, event) { + 
context.log.info(`JavaScript Kafka trigger function called for message ${event.Value}`); + context.log.info("Headers for this message:",context.bindingData.headers); +}; \ No newline at end of file diff --git a/samples/javascript/README.md b/samples/javascript/README.md index 8ed316e..85bd1cd 100644 --- a/samples/javascript/README.md +++ b/samples/javascript/README.md @@ -39,17 +39,17 @@ In the table below, `Kafka Cluster` local means that the sample users a Kafka cl | Name | Description | Kafka Cluster| Enabled | | ----- | --------------- | -------| ---| -| UsersTrigger | Simple Kafka trigger sample | local | yes | -| UsersTriggerMany | Kafka batch processing sample with Confluent Cloud | Confluent Cloud | no | +| KafkaTrigger | Simple Kafka trigger sample | local | yes | +| KafkaTriggerMany | Kafka batch processing sample with Confluent Cloud | Confluent Cloud | no | ### Modify function.json_ and local.settings.json -If you want to use the `UsersTriggerMany` sample, rename `UsersTriggerMany/function.json_` to `UsersTriggerMany/function.json`. This allows the Azure Functions Runtime to detect the function. +If you want to use the `KafkaTriggerMany` sample, rename `KafkaTriggerMany/function.json_` to `KafkaTriggerMany/function.json`. This allows the Azure Functions Runtime to detect the function. Then copy `local.settings.json.example` to `local.settings.json` and configure your [ccloud](https://docs.confluent.io/current/cloud/cli/index.html) environment. -### Modify UsersTriggerMany/function.json (Windows user only) +### Modify KafkaTriggerMany/function.json (Windows user only) If you want to run the sample on your Windows with Confluent Cloud and you are not using DevContainer, uncomment the following line. It is the settings of the CA certificate. .NET Core that is azure functions host language can not access the Windows registry, which means it can not access the CA certificate of the Confluent Cloud. 
diff --git a/samples/javascript/UsersTrigger/index.js b/samples/javascript/UsersTrigger/index.js deleted file mode 100644 index b0b7305..0000000 --- a/samples/javascript/UsersTrigger/index.js +++ /dev/null @@ -1,8 +0,0 @@ -var string_decode = require('string_decoder').StringDecoder; - -module.exports = async function (context, event) { - const dec = new string_decode('utf-8'); - let event_str = dec.write(event); - - context.log.info(`JavaScript Kafka trigger function called for message ${event_str}`); -}; \ No newline at end of file diff --git a/samples/javascript/UsersTriggerMany/index.js b/samples/javascript/UsersTriggerMany/index.js deleted file mode 100644 index 1a0bfab..0000000 --- a/samples/javascript/UsersTriggerMany/index.js +++ /dev/null @@ -1,10 +0,0 @@ -const { StringDecoder } = require('string_decoder'); - -module.exports = async function (context, event) { - function print(event) { - const dec = new StringDecoder('utf-8'); - let event_str = dec.write(event); - context.log.info(`JavaScript Kafka trigger function called for message ${event_str}`); - } - event.map(print); -}; \ No newline at end of file diff --git a/samples/javascript/extensions.csproj b/samples/javascript/extensions.csproj index 7a5e204..2a53db0 100644 --- a/samples/javascript/extensions.csproj +++ b/samples/javascript/extensions.csproj @@ -5,7 +5,7 @@ ** - + diff --git a/samples/javascript/local.settings.json.eventhub.example b/samples/javascript/local.settings.json.eventhub.example new file mode 100644 index 0000000..03ef301 --- /dev/null +++ b/samples/javascript/local.settings.json.eventhub.example @@ -0,0 +1,10 @@ +{ + "IsEncrypted": false, + "Values": { + "AzureWebJobsStorage": "UseDevelopmentStorage=true", + "FUNCTIONS_WORKER_RUNTIME": "node", + "BrokerList": ".servicebus.windows.net:9093", + "EventHubConnectionString": "", + "topic": "{YOUR_KAFKA_TOPIC_NAME}" + } +} \ No newline at end of file diff --git a/samples/javascript/local.settings.json.example 
b/samples/javascript/local.settings.json.example index 412cfb7..36e2316 100644 --- a/samples/javascript/local.settings.json.example +++ b/samples/javascript/local.settings.json.example @@ -3,8 +3,9 @@ "Values": { "AzureWebJobsStorage": "UseDevelopmentStorage=true", "FUNCTIONS_WORKER_RUNTIME": "node", - "BrokerList": "{YOUR_CONFLUENT_CLOUD_BROKER}.eastus.azure.confluent.cloud:9092", + "BrokerList": "{YOUR_CONFLUENT_CLOUD_BROKER}", "ConfluentCloudUserName": "{YOUR_CONFLUENT_CLOUD_USERNAME}", - "ConfluentCloudPassword": "{YOUR_CONFLUENT_CLOUD_PASSWORD}" + "ConfluentCloudPassword": "{YOUR_CONFLUENT_CLOUD_PASSWORD}", + "topic": "{YOUR_KAFKA_TOPIC_NAME}" } } \ No newline at end of file diff --git a/samples/powershell/KafkaOutput/function.confluent.json b/samples/powershell/KafkaOutput/function.confluent.json new file mode 100644 index 0000000..4e29b59 --- /dev/null +++ b/samples/powershell/KafkaOutput/function.confluent.json @@ -0,0 +1,29 @@ +{ + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "Request", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "name": "outputMessage", + "brokerList": "BrokerList", + "topic": "topic", + "username" : "%ConfluentCloudUserName%", + "password" : "%ConfluentCloudPassword%", + "protocol": "SASLSSL", + "authenticationMode": "PLAIN", + "direction": "out" + }, + { + "type": "http", + "direction": "out", + "name": "Response" + } + ] +} \ No newline at end of file diff --git a/samples/powershell/KafkaOutput/function.eventhub.json b/samples/powershell/KafkaOutput/function.eventhub.json new file mode 100644 index 0000000..a323038 --- /dev/null +++ b/samples/powershell/KafkaOutput/function.eventhub.json @@ -0,0 +1,29 @@ +{ + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "Request", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "name": "outputMessage", + "brokerList": "BrokerList", + "topic": "topic", + "username" : 
"$ConnectionString", + "password" : "EventHubConnectionString", + "protocol": "SASLSSL", + "authenticationMode": "PLAIN", + "direction": "out" + }, + { + "type": "http", + "direction": "out", + "name": "Response" + } + ] +} \ No newline at end of file diff --git a/samples/powershell/KafkaOutput/run.ps1 b/samples/powershell/KafkaOutput/run.ps1 new file mode 100644 index 0000000..3bd7a0d --- /dev/null +++ b/samples/powershell/KafkaOutput/run.ps1 @@ -0,0 +1,19 @@ +using namespace System.Net + +# Input bindings are passed in via param block. +param($Request, $TriggerMetadata) + +# Write to the Azure Functions log stream. +Write-Host "PowerShell HTTP trigger function processed a request." + +# Interact with query parameters or the body of the request. +$message = $Request.Query.Message + +$message + +Push-OutputBinding -Name outputMessage -Value ($message) + +# Associate values to output bindings by calling 'Push-OutputBinding'. +Push-OutputBinding -Name Response -Value ([HttpResponseContext]@{ + StatusCode = [HttpStatusCode]::OK +}) \ No newline at end of file diff --git a/samples/powershell/KafkaOutputMany/function.confluent.json b/samples/powershell/KafkaOutputMany/function.confluent.json new file mode 100644 index 0000000..795aee8 --- /dev/null +++ b/samples/powershell/KafkaOutputMany/function.confluent.json @@ -0,0 +1,29 @@ +{ + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "Request", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "name": "outputMessage", + "brokerList": "BrokerList", + "topic": "topic", + "username" : "%ConfluentCloudUserName%", + "password" : "%ConfluentCloudPassword%", + "protocol": "SASLSSL", + "authenticationMode": "PLAIN", + "direction": "out" + }, + { + "type": "http", + "direction": "out", + "name": "Response" + } + ] +} \ No newline at end of file diff --git a/samples/powershell/KafkaOutputMany/function.eventhub.json b/samples/powershell/KafkaOutputMany/function.eventhub.json 
new file mode 100644 index 0000000..795aee8 --- /dev/null +++ b/samples/powershell/KafkaOutputMany/function.eventhub.json @@ -0,0 +1,29 @@ +{ + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "Request", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "name": "outputMessage", + "brokerList": "BrokerList", + "topic": "topic", + "username" : "%ConfluentCloudUserName%", + "password" : "%ConfluentCloudPassword%", + "protocol": "SASLSSL", + "authenticationMode": "PLAIN", + "direction": "out" + }, + { + "type": "http", + "direction": "out", + "name": "Response" + } + ] +} \ No newline at end of file diff --git a/samples/powershell/KafkaOutputMany/run.ps1 b/samples/powershell/KafkaOutputMany/run.ps1 new file mode 100644 index 0000000..b29d73e --- /dev/null +++ b/samples/powershell/KafkaOutputMany/run.ps1 @@ -0,0 +1,15 @@ +using namespace System.Net + +# Input bindings are passed in via param block. +param($Request, $TriggerMetadata) + +# Write to the Azure Functions log stream. +Write-Host "PowerShell HTTP trigger function processed a request." + +$message = @("one", "two") +Push-OutputBinding -Name outputMessage -Value ($message) + +# Associate values to output bindings by calling 'Push-OutputBinding'. 
+Push-OutputBinding -Name Response -Value ([HttpResponseContext]@{ + StatusCode = [HttpStatusCode]::OK +}) \ No newline at end of file diff --git a/samples/powershell/KafkaOutputManyWithHeaders/function.confluent.json b/samples/powershell/KafkaOutputManyWithHeaders/function.confluent.json new file mode 100644 index 0000000..b290a77 --- /dev/null +++ b/samples/powershell/KafkaOutputManyWithHeaders/function.confluent.json @@ -0,0 +1,29 @@ +{ + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "Request", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "name": "Message", + "brokerList": "BrokerList", + "topic": "topic", + "username" : "%ConfluentCloudUserName%", + "password" : "%ConfluentCloudPassword%", + "protocol": "SASLSSL", + "authenticationMode": "PLAIN", + "direction": "out" + }, + { + "type": "http", + "direction": "out", + "name": "Response" + } + ] +} \ No newline at end of file diff --git a/samples/powershell/KafkaOutputManyWithHeaders/function.eventhub.json b/samples/powershell/KafkaOutputManyWithHeaders/function.eventhub.json new file mode 100644 index 0000000..c21dbae --- /dev/null +++ b/samples/powershell/KafkaOutputManyWithHeaders/function.eventhub.json @@ -0,0 +1,29 @@ +{ + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "Request", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "name": "Message", + "brokerList": "BrokerList", + "topic": "topic", + "username": "$ConnectionString", + "password": "EventHubConnectionString", + "protocol": "SASLSSL", + "authenticationMode": "PLAIN", + "direction": "out" + }, + { + "type": "http", + "direction": "out", + "name": "Response" + } + ] +} \ No newline at end of file diff --git a/samples/powershell/KafkaOutputManyWithHeaders/run.ps1 b/samples/powershell/KafkaOutputManyWithHeaders/run.ps1 new file mode 100644 index 0000000..78f5bf9 --- /dev/null +++ 
b/samples/powershell/KafkaOutputManyWithHeaders/run.ps1 @@ -0,0 +1,34 @@ +using namespace System.Net + +# Input bindings are passed in via param block. +param($Request, $TriggerMetadata) + +# Write to the Azure Functions log stream. +Write-Host "PowerShell HTTP trigger function processed a request." + +# Interact with query parameters or the body of the request. +$message = $Request.Query.Message +if (-not $message) { + $message = $Request.Body.Message +} + +$kevent = @{ + Offset = 364 + Partition = 0 + Topic = "kafkaeventhubtest1" + Timestamp = "2022-04-09T03:20:06.591Z" + Value = $message + Headers= @(@{ + Key= "test" + Value= "powershell" + } + ) +} + +Push-OutputBinding -Name Message -Value $kevent + +# Associate values to output bindings by calling 'Push-OutputBinding'. +Push-OutputBinding -Name Response -Value ([HttpResponseContext]@{ + StatusCode = [HttpStatusCode]::OK + Body = 'ok' +}) diff --git a/samples/powershell/KafkaOutputWithHeaders/function.confluent.json b/samples/powershell/KafkaOutputWithHeaders/function.confluent.json new file mode 100644 index 0000000..b290a77 --- /dev/null +++ b/samples/powershell/KafkaOutputWithHeaders/function.confluent.json @@ -0,0 +1,29 @@ +{ + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "Request", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "name": "Message", + "brokerList": "BrokerList", + "topic": "topic", + "username" : "%ConfluentCloudUserName%", + "password" : "%ConfluentCloudPassword%", + "protocol": "SASLSSL", + "authenticationMode": "PLAIN", + "direction": "out" + }, + { + "type": "http", + "direction": "out", + "name": "Response" + } + ] +} \ No newline at end of file diff --git a/samples/powershell/KafkaOutputWithHeaders/function.eventhub.json b/samples/powershell/KafkaOutputWithHeaders/function.eventhub.json new file mode 100644 index 0000000..c21dbae --- /dev/null +++ b/samples/powershell/KafkaOutputWithHeaders/function.eventhub.json @@ -0,0 
+1,29 @@ +{ + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "Request", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "name": "Message", + "brokerList": "BrokerList", + "topic": "topic", + "username": "$ConnectionString", + "password": "EventHubConnectionString", + "protocol": "SASLSSL", + "authenticationMode": "PLAIN", + "direction": "out" + }, + { + "type": "http", + "direction": "out", + "name": "Response" + } + ] +} \ No newline at end of file diff --git a/samples/powershell/KafkaOutputWithHeaders/run.ps1 b/samples/powershell/KafkaOutputWithHeaders/run.ps1 new file mode 100644 index 0000000..21eec67 --- /dev/null +++ b/samples/powershell/KafkaOutputWithHeaders/run.ps1 @@ -0,0 +1,42 @@ +using namespace System.Net + +# Input bindings are passed in via param block. +param($Request, $TriggerMetadata) + +# Write to the Azure Functions log stream. +Write-Host "PowerShell HTTP trigger function processed a request." + +$kevent1 = @{ + Offset = 364 + Partition = 0 + Topic = "kafkaeventhubtest1" + Timestamp = "2022-04-09T03:20:06.591Z" + Value = "one" + Headers= @(@{ + Key= "test" + Value= "powershell" + } + ) +} + +$kevent2 = @{ + Offset = 364 + Partition = 0 + Topic = "kafkaeventhubtest1" + Timestamp = "2022-04-09T03:20:06.591Z" + Value = "two" + Headers= @(@{ + Key= "test" + Value= "powershell" + } + ) +} + +$kevent= @($kevent1, $kevent2) +Push-OutputBinding -Name Message -Value $kevent + +# Associate values to output bindings by calling 'Push-OutputBinding'. 
+Push-OutputBinding -Name Response -Value ([HttpResponseContext]@{ + StatusCode = [HttpStatusCode]::OK + Body = 'ok' +}) diff --git a/samples/powershell/KafkaTrigger/function.confluent.json b/samples/powershell/KafkaTrigger/function.confluent.json new file mode 100644 index 0000000..b8b4e80 --- /dev/null +++ b/samples/powershell/KafkaTrigger/function.confluent.json @@ -0,0 +1,18 @@ +{ + "bindings": [ + { + "type": "kafkaTrigger", + "name": "kafkaEvent", + "direction": "in", + "protocol" : "SASLSSL", + "password" : "%ConfluentCloudPassword%", + "dataType" : "string", + "topic" : "topic", + "authenticationMode" : "PLAIN", + "consumerGroup" : "$Default", + "username" : "%ConfluentCloudUserName%", + "brokerList" : "%BrokerList%", + "sslCaLocation": "confluent_cloud_cacert.pem" + } + ] +} \ No newline at end of file diff --git a/samples/powershell/KafkaTrigger/function.eventhub.json b/samples/powershell/KafkaTrigger/function.eventhub.json new file mode 100644 index 0000000..fda7601 --- /dev/null +++ b/samples/powershell/KafkaTrigger/function.eventhub.json @@ -0,0 +1,18 @@ +{ + "bindings": [ + { + "type": "kafkaTrigger", + "name": "kafkaEvent", + "direction": "in", + "protocol" : "SASLSSL", + "password" : "EventHubConnectionString", + "dataType" : "string", + "topic" : "topic", + "authenticationMode" : "PLAIN", + "consumerGroup" : "$Default", + "username" : "$ConnectionString", + "brokerList" : "%BrokerList%", + "sslCaLocation": "confluent_cloud_cacert.pem" + } + ] +} \ No newline at end of file diff --git a/samples/powershell/KafkaTrigger/run.ps1 b/samples/powershell/KafkaTrigger/run.ps1 new file mode 100644 index 0000000..dee57d2 --- /dev/null +++ b/samples/powershell/KafkaTrigger/run.ps1 @@ -0,0 +1,5 @@ +using namespace System.Net + +param($kafkaEvent, $TriggerMetadata) + +Write-Output "Powershell Kafka trigger function called for message $kafkaEvent.Value" diff --git a/samples/powershell/kafkapwsh/function.json 
b/samples/powershell/KafkaTriggerMany/function.confluent.json similarity index 94% rename from samples/powershell/kafkapwsh/function.json rename to samples/powershell/KafkaTriggerMany/function.confluent.json index 99fe1c3..c085197 100644 --- a/samples/powershell/kafkapwsh/function.json +++ b/samples/powershell/KafkaTriggerMany/function.confluent.json @@ -7,7 +7,7 @@ "protocol" : "SASLSSL", "password" : "%ConfluentCloudPassword%", "dataType" : "string", - "topic" : "message", + "topic" : "topic", "authenticationMode" : "PLAIN", "cardinality" : "MANY", "consumerGroup" : "$Default", diff --git a/samples/powershell/KafkaTriggerMany/function.eventhub.json b/samples/powershell/KafkaTriggerMany/function.eventhub.json new file mode 100644 index 0000000..9f75062 --- /dev/null +++ b/samples/powershell/KafkaTriggerMany/function.eventhub.json @@ -0,0 +1,18 @@ +{ + "bindings": [ + { + "type": "kafkaTrigger", + "name": "kafkaEvents", + "direction": "in", + "protocol" : "SASLSSL", + "password" : "EventHubConnectionString", + "dataType" : "string", + "topic" : "topic", + "authenticationMode" : "PLAIN", + "cardinality" : "MANY", + "consumerGroup" : "$Default", + "username" : "$ConnectionString", + "brokerList" : "%BrokerList%" + } + ] +} \ No newline at end of file diff --git a/samples/powershell/KafkaTriggerMany/run.ps1 b/samples/powershell/KafkaTriggerMany/run.ps1 new file mode 100644 index 0000000..e6f358a --- /dev/null +++ b/samples/powershell/KafkaTriggerMany/run.ps1 @@ -0,0 +1,9 @@ +using namespace System.Net + +param($kafkaEvents, $TriggerMetadata) + +$kafkaEvents +foreach ($kafkaEvent in $kafkaEvents) { + $event = $kafkaEvent | ConvertFrom-Json -AsHashtable + Write-Output "Powershell Kafka trigger function called for message $event.Value" +} \ No newline at end of file diff --git a/samples/powershell/KafkaTriggerManyWithHeaders/function.confluent.json b/samples/powershell/KafkaTriggerManyWithHeaders/function.confluent.json new file mode 100644 index 0000000..68cf430 --- 
/dev/null +++ b/samples/powershell/KafkaTriggerManyWithHeaders/function.confluent.json @@ -0,0 +1,18 @@ +{ + "bindings": [ + { + "type": "kafkaTrigger", + "name": "kafkaEvents", + "direction": "in", + "brokerList": "BrokerList", + "topic": "topic", + "username": "ConfluentCloudUserName", + "password": "ConfluentCloudPassword", + "protocol": "saslSsl", + "authenticationMode": "plain", + "consumerGroup": "$Default", + "cardinality" : "MANY", + "dataType" : "string" + } + ] +} \ No newline at end of file diff --git a/samples/powershell/KafkaTriggerManyWithHeaders/function.eventhub.json b/samples/powershell/KafkaTriggerManyWithHeaders/function.eventhub.json new file mode 100644 index 0000000..05c4675 --- /dev/null +++ b/samples/powershell/KafkaTriggerManyWithHeaders/function.eventhub.json @@ -0,0 +1,18 @@ +{ + "bindings": [ + { + "type": "kafkaTrigger", + "name": "kafkaEvents", + "direction": "in", + "brokerList": "BrokerList", + "topic": "topic", + "username": "$ConnectionString", + "password": "EventHubConnectionString", + "protocol": "saslSsl", + "authenticationMode": "plain", + "consumerGroup": "$Default", + "cardinality" : "MANY", + "dataType" : "string" + } + ] +} \ No newline at end of file diff --git a/samples/powershell/KafkaTriggerManyWithHeaders/run.ps1 b/samples/powershell/KafkaTriggerManyWithHeaders/run.ps1 new file mode 100644 index 0000000..0a83e47 --- /dev/null +++ b/samples/powershell/KafkaTriggerManyWithHeaders/run.ps1 @@ -0,0 +1,14 @@ +using namespace System.Net + +param($kafkaEvents, $TriggerMetadata) + +foreach ($kafkaEvent in $kafkaEvents) { + $kevent = $kafkaEvent | ConvertFrom-Json -AsHashtable + Write-Output "Powershell Kafka trigger function called for message $kevent.Value" + Write-Output "Headers for this message:" + foreach ($header in $kevent.Headers) { + $DecodedValue = [System.Text.Encoding]::Unicode.GetString([System.Convert]::FromBase64String($header.Value)) + $Key = $header.Key + Write-Output "Key: $Key Value: $DecodedValue" + } +} 
diff --git a/samples/powershell/KafkaTriggerWithHeaders/function.confluent.json b/samples/powershell/KafkaTriggerWithHeaders/function.confluent.json new file mode 100644 index 0000000..1c37dee --- /dev/null +++ b/samples/powershell/KafkaTriggerWithHeaders/function.confluent.json @@ -0,0 +1,17 @@ +{ + "bindings": [ + { + "type": "kafkaTrigger", + "name": "kafkaEvent", + "direction": "in", + "brokerList": "BrokerList", + "topic": "topic", + "username" : "%ConfluentCloudUserName%", + "password": "%ConfluentCloudPassword%", + "protocol": "saslSsl", + "authenticationMode": "plain", + "consumerGroup": "$Default", + "dataType" : "string" + } + ] +} \ No newline at end of file diff --git a/samples/powershell/KafkaTriggerWithHeaders/function.eventhub.json b/samples/powershell/KafkaTriggerWithHeaders/function.eventhub.json new file mode 100644 index 0000000..77eb2d7 --- /dev/null +++ b/samples/powershell/KafkaTriggerWithHeaders/function.eventhub.json @@ -0,0 +1,17 @@ +{ + "bindings": [ + { + "type": "kafkaTrigger", + "name": "kafkaEvent", + "direction": "in", + "brokerList": "BrokerList", + "topic": "topic", + "username": "$ConnectionString", + "password": "EventHubConnectionString", + "protocol": "saslSsl", + "authenticationMode": "plain", + "consumerGroup": "$Default", + "dataType" : "string" + } + ] +} \ No newline at end of file diff --git a/samples/powershell/KafkaTriggerWithHeaders/run.ps1 b/samples/powershell/KafkaTriggerWithHeaders/run.ps1 new file mode 100644 index 0000000..4175038 --- /dev/null +++ b/samples/powershell/KafkaTriggerWithHeaders/run.ps1 @@ -0,0 +1,13 @@ +using namespace System.Net + +param($kafkaEvent, $TriggerMetadata) + +# $kafkaEvent +Write-Output "Powershell Kafka trigger function called for message" + $kafkaEvent.Value +Write-Output "Headers for this message:" +foreach ($header in $kafkaEvent.Headers) { + $DecodedValue = [System.Text.Encoding]::Unicode.GetString([System.Convert]::FromBase64String($header.Value)) + $Key = $header.Key + 
Write-Output "Key: $Key Value: $DecodedValue" +} +# $TriggerMetadata \ No newline at end of file diff --git a/samples/powershell/extensions.csproj b/samples/powershell/extensions.csproj index 2a67601..6fed398 100644 --- a/samples/powershell/extensions.csproj +++ b/samples/powershell/extensions.csproj @@ -6,7 +6,7 @@ ** - + diff --git a/samples/powershell/kafkapwsh/run.ps1 b/samples/powershell/kafkapwsh/run.ps1 deleted file mode 100644 index 6718c15..0000000 --- a/samples/powershell/kafkapwsh/run.ps1 +++ /dev/null @@ -1,7 +0,0 @@ -using namespace System.Net - -param($kafkaEvent, $TriggerMetadata) - -$kafkaEvent - -$TriggerMetadata \ No newline at end of file diff --git a/samples/powershell/local.settings.json.eventhub.example b/samples/powershell/local.settings.json.eventhub.example new file mode 100644 index 0000000..1b5aaba --- /dev/null +++ b/samples/powershell/local.settings.json.eventhub.example @@ -0,0 +1,10 @@ +{ + "IsEncrypted": false, + "Values": { + "AzureWebJobsStorage": "UseDevelopmentStorage=true", + "FUNCTIONS_WORKER_RUNTIME": "powershell", + "BrokerList": ".servicebus.windows.net:9093", + "EventHubConnectionString": "", + "topic": "{YOUR_KAFKA_TOPIC_NAME}" + } +} \ No newline at end of file diff --git a/samples/powershell/local.settings.json.example b/samples/powershell/local.settings.json.example index 5c67a89..7686935 100644 --- a/samples/powershell/local.settings.json.example +++ b/samples/powershell/local.settings.json.example @@ -1,8 +1,11 @@ { "IsEncrypted": false, "Values": { - "BrokerList": "YOUR_BROKER_LIST_HERE", - "ConfluentCloudUsername": "YOUR_CONFLUENT_USER_NAME_HERE", - "ConfluentCloudPassword": "YOUR_CONFLUENT_PASSWORD_HERE" + "AzureWebJobsStorage": "UseDevelopmentStorage=true", + "FUNCTIONS_WORKER_RUNTIME": "powershell", + "BrokerList": "{YOUR_CONFLUENT_CLOUD_BROKER}", + "ConfluentCloudUserName": "{YOUR_CONFLUENT_CLOUD_USERNAME}", + "ConfluentCloudPassword": "{YOUR_CONFLUENT_CLOUD_PASSWORD}", + "topic": "{YOUR_KAFKA_TOPIC_NAME}" } } \ 
No newline at end of file diff --git a/samples/python/KafkaOutput/function.confluent.json b/samples/python/KafkaOutput/function.confluent.json new file mode 100644 index 0000000..9e8b5f7 --- /dev/null +++ b/samples/python/KafkaOutput/function.confluent.json @@ -0,0 +1,30 @@ +{ + "scriptFile": "main.py", + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "req", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "direction": "out", + "name": "outputMessage", + "brokerList": "BrokerList", + "topic": "topic", + "username": "%ConfluentCloudUserName%", + "password": "%ConfluentCloudPassword%", + "protocol": "SASLSSL", + "authenticationMode": "PLAIN" + }, + { + "type": "http", + "direction": "out", + "name": "$return" + } + ] +} \ No newline at end of file diff --git a/samples/python/KafkaOutput/function.eventhub.json b/samples/python/KafkaOutput/function.eventhub.json new file mode 100644 index 0000000..40933d1 --- /dev/null +++ b/samples/python/KafkaOutput/function.eventhub.json @@ -0,0 +1,30 @@ +{ + "scriptFile": "main.py", + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "req", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "direction": "out", + "name": "outputMessage", + "brokerList": "BrokerList", + "topic": "topic", + "username": "$ConnectionString", + "password": "EventHubConnectionString", + "protocol": "SASLSSL", + "authenticationMode": "PLAIN" + }, + { + "type": "http", + "direction": "out", + "name": "$return" + } + ] +} \ No newline at end of file diff --git a/samples/python/KafkaOutput/main.py b/samples/python/KafkaOutput/main.py new file mode 100644 index 0000000..380ac87 --- /dev/null +++ b/samples/python/KafkaOutput/main.py @@ -0,0 +1,9 @@ +import logging + +import azure.functions as func + + +def main(req: func.HttpRequest, outputMessage: func.Out[str]) -> func.HttpResponse: + input_msg = req.params.get('message') + 
outputMessage.set(input_msg) + return 'OK' \ No newline at end of file diff --git a/samples/python/KafkaOutputMany/function.confluent.json b/samples/python/KafkaOutputMany/function.confluent.json new file mode 100644 index 0000000..9e8b5f7 --- /dev/null +++ b/samples/python/KafkaOutputMany/function.confluent.json @@ -0,0 +1,30 @@ +{ + "scriptFile": "main.py", + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "req", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "direction": "out", + "name": "outputMessage", + "brokerList": "BrokerList", + "topic": "topic", + "username": "%ConfluentCloudUserName%", + "password": "%ConfluentCloudPassword%", + "protocol": "SASLSSL", + "authenticationMode": "PLAIN" + }, + { + "type": "http", + "direction": "out", + "name": "$return" + } + ] +} \ No newline at end of file diff --git a/samples/python/KafkaOutputMany/function.eventhub.json b/samples/python/KafkaOutputMany/function.eventhub.json new file mode 100644 index 0000000..40933d1 --- /dev/null +++ b/samples/python/KafkaOutputMany/function.eventhub.json @@ -0,0 +1,30 @@ +{ + "scriptFile": "main.py", + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "req", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "direction": "out", + "name": "outputMessage", + "brokerList": "BrokerList", + "topic": "topic", + "username": "$ConnectionString", + "password": "EventHubConnectionString", + "protocol": "SASLSSL", + "authenticationMode": "PLAIN" + }, + { + "type": "http", + "direction": "out", + "name": "$return" + } + ] +} \ No newline at end of file diff --git a/samples/python/KafkaOutputMany/main.py b/samples/python/KafkaOutputMany/main.py new file mode 100644 index 0000000..73674a6 --- /dev/null +++ b/samples/python/KafkaOutputMany/main.py @@ -0,0 +1,8 @@ +import logging +import typing +from azure.functions import Out, HttpRequest, HttpResponse +import json + +def 
main(req: HttpRequest, outputMessage: Out[str] ) -> HttpResponse: + outputMessage.set(['one', 'two']) + return 'OK' \ No newline at end of file diff --git a/samples/python/KafkaOutputManyWithHeaders/__init__.py b/samples/python/KafkaOutputManyWithHeaders/__init__.py new file mode 100644 index 0000000..506989d --- /dev/null +++ b/samples/python/KafkaOutputManyWithHeaders/__init__.py @@ -0,0 +1,11 @@ +import logging + +import azure.functions as func +import json + +def main(req: func.HttpRequest, out: func.Out[str]) -> func.HttpResponse: + message = req.params.get('message') + kevent = [{ "Offset": 364, "Partition":0,"Topic":"kafkaeventhubtest1","Timestamp":"2022-04-09T03:20:06.591Z", "Value": "one", "Headers": [{ "Key": "test", "Value": "python" }] }, + { "Offset": 364, "Partition":0,"Topic":"kafkaeventhubtest1","Timestamp":"2022-04-09T03:20:06.591Z", "Value": "two", "Headers": [{ "Key": "test", "Value": "python" }] }] + out.set(json.dumps(kevent)) + return 'OK' \ No newline at end of file diff --git a/samples/python/KafkaOutputManyWithHeaders/function.confluent.json b/samples/python/KafkaOutputManyWithHeaders/function.confluent.json new file mode 100644 index 0000000..46cf74e --- /dev/null +++ b/samples/python/KafkaOutputManyWithHeaders/function.confluent.json @@ -0,0 +1,30 @@ +{ + "scriptFile": "__init__.py", + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "req", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "direction": "out", + "name": "out", + "brokerList": "BrokerList", + "topic": "topic", + "username" : "%ConfluentCloudUserName%", + "password": "%ConfluentCloudPassword%", + "protocol": "SASLSSL", + "authenticationMode": "PLAIN" + }, + { + "type": "http", + "direction": "out", + "name": "$return" + } + ] +} \ No newline at end of file diff --git a/samples/python/KafkaOutputManyWithHeaders/function.eventhub.json b/samples/python/KafkaOutputManyWithHeaders/function.eventhub.json new file mode 100644 index 
0000000..b958eef --- /dev/null +++ b/samples/python/KafkaOutputManyWithHeaders/function.eventhub.json @@ -0,0 +1,30 @@ +{ + "scriptFile": "__init__.py", + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "req", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "direction": "out", + "name": "out", + "brokerList": "BrokerList", + "topic": "topic", + "username": "$ConnectionString", + "password": "EventHubConnectionString", + "protocol": "SASLSSL", + "authenticationMode": "PLAIN" + }, + { + "type": "http", + "direction": "out", + "name": "$return" + } + ] +} \ No newline at end of file diff --git a/samples/python/KafkaOutputWithHeaders/__init__.py b/samples/python/KafkaOutputWithHeaders/__init__.py new file mode 100644 index 0000000..8256436 --- /dev/null +++ b/samples/python/KafkaOutputWithHeaders/__init__.py @@ -0,0 +1,10 @@ +import logging + +import azure.functions as func +import json + +def main(req: func.HttpRequest, out: func.Out[str]) -> func.HttpResponse: + message = req.params.get('message') + kevent = { "Offset":364,"Partition":0,"Topic":"kafkaeventhubtest1","Timestamp":"2022-04-09T03:20:06.591Z", "Value": message, "Headers": [{ "Key": "test", "Value": "python" }] } + out.set(json.dumps(kevent)) + return 'OK' diff --git a/samples/python/KafkaOutputWithHeaders/function.confluent.json b/samples/python/KafkaOutputWithHeaders/function.confluent.json new file mode 100644 index 0000000..e15db9e --- /dev/null +++ b/samples/python/KafkaOutputWithHeaders/function.confluent.json @@ -0,0 +1,30 @@ +{ + "scriptFile": "__init__.py", + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "req", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "direction": "out", + "name": "out", + "brokerList": "BrokerList", + "topic": "topic", + "username" : "%ConfluentCloudUserName%", + "password": "ConfluentCloudPassword", + "protocol": "SASLSSL", + "authenticationMode": 
"PLAIN" + }, + { + "type": "http", + "direction": "out", + "name": "$return" + } + ] +} \ No newline at end of file diff --git a/samples/python/KafkaOutputWithHeaders/function.eventhub.json b/samples/python/KafkaOutputWithHeaders/function.eventhub.json new file mode 100644 index 0000000..b958eef --- /dev/null +++ b/samples/python/KafkaOutputWithHeaders/function.eventhub.json @@ -0,0 +1,30 @@ +{ + "scriptFile": "__init__.py", + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "req", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "direction": "out", + "name": "out", + "brokerList": "BrokerList", + "topic": "topic", + "username": "$ConnectionString", + "password": "EventHubConnectionString", + "protocol": "SASLSSL", + "authenticationMode": "PLAIN" + }, + { + "type": "http", + "direction": "out", + "name": "$return" + } + ] +} \ No newline at end of file diff --git a/samples/python/KafkaTrigger/function.json b/samples/python/KafkaTrigger/function.confluent.json similarity index 93% rename from samples/python/KafkaTrigger/function.json rename to samples/python/KafkaTrigger/function.confluent.json index 9092cce..960062b 100644 --- a/samples/python/KafkaTrigger/function.json +++ b/samples/python/KafkaTrigger/function.confluent.json @@ -4,7 +4,7 @@ { "type": "kafkaTrigger", "name": "kevent", - "topic": "users", + "topic": "topic", "brokerList": "%BrokerList%", "username": "%ConfluentCloudUserName%", "password": "%ConfluentCloudPassword%", diff --git a/samples/python/KafkaTrigger/function.eventhub.json b/samples/python/KafkaTrigger/function.eventhub.json new file mode 100644 index 0000000..81ac534 --- /dev/null +++ b/samples/python/KafkaTrigger/function.eventhub.json @@ -0,0 +1,16 @@ +{ + "scriptFile": "main.py", + "bindings": [ + { + "type": "kafkaTrigger", + "name": "kevent", + "topic": "topic", + "brokerList": "%BrokerList%", + "username": "$ConnectionString", + "password": "EventHubConnectionString", + 
"consumerGroup" : "functions", + "protocol": "saslSsl", + "authenticationMode": "plain" + } + ] +} diff --git a/samples/python/KafkaTriggerMany/function.json_ b/samples/python/KafkaTriggerMany/function.confluent.json similarity index 100% rename from samples/python/KafkaTriggerMany/function.json_ rename to samples/python/KafkaTriggerMany/function.confluent.json diff --git a/samples/python/KafkaTriggerMany/function.eventhub.json b/samples/python/KafkaTriggerMany/function.eventhub.json new file mode 100644 index 0000000..e24c4a2 --- /dev/null +++ b/samples/python/KafkaTriggerMany/function.eventhub.json @@ -0,0 +1,19 @@ +{ +   "scriptFile": "main.py", +   "bindings": [ +     { + "type" : "kafkaTrigger", + "direction": "in", + "name" : "kevents", + "protocol" : "SASLSSL", + "password" : "EventHubConnectionString", + "topic" : "message_python", + "authenticationMode" : "PLAIN", + "cardinality" : "MANY", + "dataType": "string", + "consumerGroup" : "$Default", + "username" : "$ConnectionString", + "BrokerList" : "%BrokerList%" +     } + ] +} diff --git a/samples/python/KafkaTriggerManyWithHeaders/__init__.py b/samples/python/KafkaTriggerManyWithHeaders/__init__.py new file mode 100644 index 0000000..cbde0cb --- /dev/null +++ b/samples/python/KafkaTriggerManyWithHeaders/__init__.py @@ -0,0 +1,14 @@ +import logging +import typing +from azure.functions import KafkaEvent +import json +import base64 + +def main(kevents : typing.List[KafkaEvent]): + for event in kevents: + event_dec = event.get_body().decode('utf-8') + event_json = json.loads(event_dec) + logging.info("Python Kafka trigger function called for message " + event_json["Value"]) + headers = event_json["Headers"] + for header in headers: + logging.info("Key: "+ header['Key'] + " Value: "+ str(base64.b64decode(header['Value']).decode('ascii'))) diff --git a/samples/python/KafkaTriggerManyWithHeaders/function.confluent.json b/samples/python/KafkaTriggerManyWithHeaders/function.confluent.json new file mode 100644 index 
0000000..26a75e4 --- /dev/null +++ b/samples/python/KafkaTriggerManyWithHeaders/function.confluent.json @@ -0,0 +1,19 @@ +{ + "scriptFile": "__init__.py", + "bindings": [ + { + "type": "kafkaTrigger", + "name": "kevents", + "direction": "in", + "brokerList": "BrokerList", + "topic": "topic", + "username" : "%ConfluentCloudUserName%", + "password": "ConfluentCloudPassword", + "protocol": "saslSsl", + "authenticationMode": "plain", + "consumerGroup": "$Default", + "cardinality" : "MANY", + "dataType": "string" + } + ] +} \ No newline at end of file diff --git a/samples/python/KafkaTriggerManyWithHeaders/function.eventhub.json b/samples/python/KafkaTriggerManyWithHeaders/function.eventhub.json new file mode 100644 index 0000000..b01f1c6 --- /dev/null +++ b/samples/python/KafkaTriggerManyWithHeaders/function.eventhub.json @@ -0,0 +1,19 @@ +{ + "scriptFile": "__init__.py", + "bindings": [ + { + "type": "kafkaTrigger", + "name": "kevents", + "direction": "in", + "brokerList": "BrokerList", + "topic": "topic", + "username": "$ConnectionString", + "password": "EventHubConnectionString", + "protocol": "saslSsl", + "authenticationMode": "plain", + "consumerGroup": "$Default", + "cardinality" : "MANY", + "dataType": "string" + } + ] +} \ No newline at end of file diff --git a/samples/python/KafkaTriggerWithHeaders/__init__.py b/samples/python/KafkaTriggerWithHeaders/__init__.py new file mode 100644 index 0000000..b8016b5 --- /dev/null +++ b/samples/python/KafkaTriggerWithHeaders/__init__.py @@ -0,0 +1,11 @@ +import logging +from azure.functions import KafkaEvent +import json +import base64 + + +def main(kevent : KafkaEvent): + logging.info("Python Kafka trigger function called for message " + kevent.metadata["Value"]) + headers = json.loads(kevent.metadata["Headers"]) + for header in headers: + logging.info("Key: "+ header['Key'] + " Value: "+ str(base64.b64decode(header['Value']).decode('ascii'))) diff --git a/samples/python/KafkaTriggerWithHeaders/function.confluent.json 
b/samples/python/KafkaTriggerWithHeaders/function.confluent.json new file mode 100644 index 0000000..cf15580 --- /dev/null +++ b/samples/python/KafkaTriggerWithHeaders/function.confluent.json @@ -0,0 +1,18 @@ +{ + "scriptFile": "__init__.py", + "bindings": [ + { + "type": "kafkaTrigger", + "name": "kevent", + "direction": "in", + "brokerList": "BrokerList", + "topic": "topic", + "username" : "%ConfluentCloudUserName%", + "password": "ConfluentCloudPassword", + "protocol": "saslSsl", + "authenticationMode": "plain", + "consumerGroup": "$Default", + "dataType": "string" + } + ] +} \ No newline at end of file diff --git a/samples/python/KafkaTriggerWithHeaders/function.eventhub.json b/samples/python/KafkaTriggerWithHeaders/function.eventhub.json new file mode 100644 index 0000000..f66b974 --- /dev/null +++ b/samples/python/KafkaTriggerWithHeaders/function.eventhub.json @@ -0,0 +1,18 @@ +{ + "scriptFile": "__init__.py", + "bindings": [ + { + "type": "kafkaTrigger", + "name": "kevent", + "direction": "in", + "brokerList": "BrokerList", + "topic": "topic", + "username": "$ConnectionString", + "password": "EventHubConnectionString", + "protocol": "saslSsl", + "authenticationMode": "plain", + "consumerGroup": "$Default", + "dataType": "string" + } + ] +} \ No newline at end of file diff --git a/samples/python/extensions.csproj b/samples/python/extensions.csproj index 7a5e204..38c92ff 100644 --- a/samples/python/extensions.csproj +++ b/samples/python/extensions.csproj @@ -5,7 +5,7 @@ ** - + diff --git a/samples/python/local.settings.json.eventhub.example b/samples/python/local.settings.json.eventhub.example new file mode 100644 index 0000000..a49cf30 --- /dev/null +++ b/samples/python/local.settings.json.eventhub.example @@ -0,0 +1,10 @@ +{ + "IsEncrypted": false, + "Values": { + "AzureWebJobsStorage": "UseDevelopmentStorage=true", + "FUNCTIONS_WORKER_RUNTIME": "python", + "BrokerList": ".servicebus.windows.net:9093", + "EventHubConnectionString": "", + "topic": 
"{YOUR_KAFKA_TOPIC_NAME}" + } +} \ No newline at end of file diff --git a/samples/python/local.settings.json.example b/samples/python/local.settings.json.example index 43d2914..ce0f405 100644 --- a/samples/python/local.settings.json.example +++ b/samples/python/local.settings.json.example @@ -1,10 +1,11 @@ { "IsEncrypted": false, "Values": { - "BrokerList": "{YOUR_CONFLUENT_CLOUD_BROKER}.eastus.azure.confluent.cloud:9092", + "AzureWebJobsStorage": "UseDevelopmentStorage=true", + "FUNCTIONS_WORKER_RUNTIME": "python", + "BrokerList": "{YOUR_CONFLUENT_CLOUD_BROKER}", "ConfluentCloudUserName": "{YOUR_CONFLUENT_CLOUD_USERNAME}", "ConfluentCloudPassword": "{YOUR_CONFLUENT_CLOUD_PASSWORD}", - "FUNCTIONS_WORKER_RUNTIME": "python", - "AzureWebJobsStorage": "" + "topic": "{YOUR_KAFKA_TOPIC_NAME}" } } \ No newline at end of file diff --git a/samples/typescript/kafka-trigger/.devcontainer/Dockerfile b/samples/typescript/.devcontainer/Dockerfile similarity index 100% rename from samples/typescript/kafka-trigger/.devcontainer/Dockerfile rename to samples/typescript/.devcontainer/Dockerfile diff --git a/samples/typescript/kafka-trigger/.devcontainer/devcontainer.json b/samples/typescript/.devcontainer/devcontainer.json similarity index 100% rename from samples/typescript/kafka-trigger/.devcontainer/devcontainer.json rename to samples/typescript/.devcontainer/devcontainer.json diff --git a/samples/typescript/kafka-trigger/.devcontainer/docker-compose.yml b/samples/typescript/.devcontainer/docker-compose.yml similarity index 100% rename from samples/typescript/kafka-trigger/.devcontainer/docker-compose.yml rename to samples/typescript/.devcontainer/docker-compose.yml diff --git a/samples/typescript/kafka-trigger/.dockerignore b/samples/typescript/.dockerignore similarity index 100% rename from samples/typescript/kafka-trigger/.dockerignore rename to samples/typescript/.dockerignore diff --git a/samples/typescript/kafka-trigger/.funcignore b/samples/typescript/.funcignore similarity 
index 100% rename from samples/typescript/kafka-trigger/.funcignore rename to samples/typescript/.funcignore diff --git a/samples/typescript/kafka-trigger/.gitignore b/samples/typescript/.gitignore similarity index 100% rename from samples/typescript/kafka-trigger/.gitignore rename to samples/typescript/.gitignore diff --git a/samples/typescript/kafka-trigger/.vscode/extensions.json b/samples/typescript/.vscode/extensions.json similarity index 100% rename from samples/typescript/kafka-trigger/.vscode/extensions.json rename to samples/typescript/.vscode/extensions.json diff --git a/samples/typescript/kafka-trigger/.vscode/launch.json b/samples/typescript/.vscode/launch.json similarity index 100% rename from samples/typescript/kafka-trigger/.vscode/launch.json rename to samples/typescript/.vscode/launch.json diff --git a/samples/typescript/kafka-trigger/.vscode/settings.json b/samples/typescript/.vscode/settings.json similarity index 100% rename from samples/typescript/kafka-trigger/.vscode/settings.json rename to samples/typescript/.vscode/settings.json diff --git a/samples/typescript/kafka-trigger/.vscode/tasks.json b/samples/typescript/.vscode/tasks.json similarity index 100% rename from samples/typescript/kafka-trigger/.vscode/tasks.json rename to samples/typescript/.vscode/tasks.json diff --git a/samples/typescript/KafkaOutput/function.confluent.json b/samples/typescript/KafkaOutput/function.confluent.json new file mode 100644 index 0000000..d1dcd6f --- /dev/null +++ b/samples/typescript/KafkaOutput/function.confluent.json @@ -0,0 +1,30 @@ +{ + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "req", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "name": "outputKafkaMessage", + "topic": "topic", + "brokerList": "%BrokerList%", + "username": "%ConfluentCloudUserName%", + "password": "%ConfluentCloudPassword%", + "protocol": "SASLSSL", + "authenticationMode": "PLAIN", + "direction": "out" + }, + { + 
"type": "http", + "direction": "out", + "name": "res" + } + ], + "scriptFile": "../dist/KafkaOutput/index.js" +} \ No newline at end of file diff --git a/samples/typescript/KafkaOutput/function.eventhub.json b/samples/typescript/KafkaOutput/function.eventhub.json new file mode 100644 index 0000000..baa9bab --- /dev/null +++ b/samples/typescript/KafkaOutput/function.eventhub.json @@ -0,0 +1,30 @@ +{ + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "req", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "name": "outputKafkaMessage", + "topic": "topic", + "brokerList": "%BrokerList%", + "username": "$ConnectionString", + "password": "EventHubConnectionString", + "protocol": "SASLSSL", + "authenticationMode": "PLAIN", + "direction": "out" + }, + { + "type": "http", + "direction": "out", + "name": "res" + } + ], + "scriptFile": "../dist/KafkaOutput/index.js" +} \ No newline at end of file diff --git a/samples/typescript/KafkaOutput/index.ts b/samples/typescript/KafkaOutput/index.ts new file mode 100644 index 0000000..5897592 --- /dev/null +++ b/samples/typescript/KafkaOutput/index.ts @@ -0,0 +1,14 @@ +import { AzureFunction, Context, HttpRequest } from "@azure/functions" + +const kafkaOutput: AzureFunction = async function (context: Context, req: HttpRequest): Promise { + const message = req.query.message; + const responseMessage = 'Ok' + context.bindings.outputKafkaMessage = message; + context.res = { + // status: 200, /* Defaults to 200 */ + body: responseMessage + }; + +}; + +export default kafkaOutput; \ No newline at end of file diff --git a/samples/typescript/KafkaOutputMany/function.confluent.json b/samples/typescript/KafkaOutputMany/function.confluent.json new file mode 100644 index 0000000..d1dcd6f --- /dev/null +++ b/samples/typescript/KafkaOutputMany/function.confluent.json @@ -0,0 +1,30 @@ +{ + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": 
"req", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "name": "outputKafkaMessage", + "topic": "topic", + "brokerList": "%BrokerList%", + "username": "%ConfluentCloudUserName%", + "password": "%ConfluentCloudPassword%", + "protocol": "SASLSSL", + "authenticationMode": "PLAIN", + "direction": "out" + }, + { + "type": "http", + "direction": "out", + "name": "res" + } + ], + "scriptFile": "../dist/KafkaOutput/index.js" +} \ No newline at end of file diff --git a/samples/typescript/KafkaOutputMany/function.eventhub.json b/samples/typescript/KafkaOutputMany/function.eventhub.json new file mode 100644 index 0000000..baa9bab --- /dev/null +++ b/samples/typescript/KafkaOutputMany/function.eventhub.json @@ -0,0 +1,30 @@ +{ + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "req", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "name": "outputKafkaMessage", + "topic": "topic", + "brokerList": "%BrokerList%", + "username": "$ConnectionString", + "password": "EventHubConnectionString", + "protocol": "SASLSSL", + "authenticationMode": "PLAIN", + "direction": "out" + }, + { + "type": "http", + "direction": "out", + "name": "res" + } + ], + "scriptFile": "../dist/KafkaOutput/index.js" +} \ No newline at end of file diff --git a/samples/typescript/KafkaOutputMany/index.ts b/samples/typescript/KafkaOutputMany/index.ts new file mode 100644 index 0000000..e4fc11f --- /dev/null +++ b/samples/typescript/KafkaOutputMany/index.ts @@ -0,0 +1,13 @@ +import { AzureFunction, Context, HttpRequest } from "@azure/functions" + +const kafkaOutputMany: AzureFunction = async function (context: Context, req: HttpRequest): Promise { + const responseMessage = 'Ok' + context.bindings.outputKafkaMessage = ['one', 'two']; + context.res = { + // status: 200, /* Defaults to 200 */ + body: responseMessage + }; + +}; + +export default kafkaOutputMany; \ No newline at end of file diff --git 
a/samples/typescript/KafkaOutputManyWithHeaders/function.confluent.json b/samples/typescript/KafkaOutputManyWithHeaders/function.confluent.json new file mode 100644 index 0000000..c6c01b3 --- /dev/null +++ b/samples/typescript/KafkaOutputManyWithHeaders/function.confluent.json @@ -0,0 +1,30 @@ +{ + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "req", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "name": "outputKafkaMessage", + "topic": "topic", + "brokerList": "%BrokerList%", + "username" : "%ConfluentCloudUserName%", + "password" : "%ConfluentCloudPassword%", + "protocol": "SASLSSL", + "authenticationMode": "PLAIN", + "direction": "out" + }, + { + "type": "http", + "direction": "out", + "name": "res" + } + ], + "scriptFile": "../dist/KafkaOutputManyWithHeaders/index.js" +} \ No newline at end of file diff --git a/samples/typescript/KafkaOutputManyWithHeaders/function.eventhub.json b/samples/typescript/KafkaOutputManyWithHeaders/function.eventhub.json new file mode 100644 index 0000000..f12882d --- /dev/null +++ b/samples/typescript/KafkaOutputManyWithHeaders/function.eventhub.json @@ -0,0 +1,30 @@ +{ + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "req", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "name": "outputKafkaMessage", + "topic": "topic", + "brokerList": "%BrokerList%", + "username": "$ConnectionString", + "password": "EventHubConnectionString", + "protocol": "SASLSSL", + "authenticationMode": "PLAIN", + "direction": "out" + }, + { + "type": "http", + "direction": "out", + "name": "res" + } + ], + "scriptFile": "../dist/KafkaOutputManyWithHeaders/index.js" +} \ No newline at end of file diff --git a/samples/typescript/KafkaOutputManyWithHeaders/index.ts b/samples/typescript/KafkaOutputManyWithHeaders/index.ts new file mode 100644 index 0000000..e37f833 --- /dev/null +++ 
b/samples/typescript/KafkaOutputManyWithHeaders/index.ts @@ -0,0 +1,14 @@ +import { AzureFunction, Context, HttpRequest } from "@azure/functions" + +const kafkaOutputMany: AzureFunction = async function (context: Context, req: HttpRequest): Promise { + const responseMessage = 'Ok' + context.bindings.outputKafkaMessage = ["{ \"Offset\":364,\"Partition\":0,\"Topic\":\"kafkaeventhubtest1\",\"Timestamp\":\"2022-04-09T03:20:06.591Z\", \"Value\": \"one\", \"Headers\": [{ \"Key\": \"test\", \"Value\": \"typescript\" }] }", + "{ \"Offset\":364,\"Partition\":0,\"Topic\":\"kafkaeventhubtest1\",\"Timestamp\":\"2022-04-09T03:20:06.591Z\", \"Value\": \"two\", \"Headers\": [{ \"Key\": \"test\", \"Value\": \"typescript\" }] }"]; + context.res = { + // status: 200, /* Defaults to 200 */ + body: responseMessage + }; + +}; + +export default kafkaOutputMany; \ No newline at end of file diff --git a/samples/typescript/KafkaOutputWithHeaders/function.confluent.json b/samples/typescript/KafkaOutputWithHeaders/function.confluent.json new file mode 100644 index 0000000..96744e3 --- /dev/null +++ b/samples/typescript/KafkaOutputWithHeaders/function.confluent.json @@ -0,0 +1,30 @@ +{ + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "req", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "name": "outputKafkaMessage", + "topic": "topic", + "brokerList": "%BrokerList%", + "username" : "%ConfluentCloudUserName%", + "password" : "%ConfluentCloudPassword%", + "protocol": "SASLSSL", + "authenticationMode": "PLAIN", + "direction": "out" + }, + { + "type": "http", + "direction": "out", + "name": "res" + } + ], + "scriptFile": "../dist/KafkaOutputWithHeaders/index.js" +} \ No newline at end of file diff --git a/samples/typescript/KafkaOutputWithHeaders/function.eventhub.json b/samples/typescript/KafkaOutputWithHeaders/function.eventhub.json new file mode 100644 index 0000000..d8b37df --- /dev/null +++ 
b/samples/typescript/KafkaOutputWithHeaders/function.eventhub.json @@ -0,0 +1,30 @@ +{ + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "req", + "methods": [ + "get" + ] + }, + { + "type": "kafka", + "name": "outputKafkaMessage", + "topic": "topic", + "brokerList": "%BrokerList%", + "username": "$ConnectionString", + "password": "EventHubConnectionString", + "protocol": "SASLSSL", + "authenticationMode": "PLAIN", + "direction": "out" + }, + { + "type": "http", + "direction": "out", + "name": "res" + } + ], + "scriptFile": "../dist/KafkaOutputWithHeaders/index.js" +} \ No newline at end of file diff --git a/samples/typescript/KafkaOutputWithHeaders/index.ts b/samples/typescript/KafkaOutputWithHeaders/index.ts new file mode 100644 index 0000000..f5acb54 --- /dev/null +++ b/samples/typescript/KafkaOutputWithHeaders/index.ts @@ -0,0 +1,14 @@ +import { AzureFunction, Context, HttpRequest } from "@azure/functions" + +const kafkaOutput: AzureFunction = async function (context: Context, req: HttpRequest): Promise { + const message = req.query.message; + const responseMessage = 'Ok' + context.bindings.outputKafkaMessage = "{ \"Offset\":364,\"Partition\":0,\"Topic\":\"kafkaeventhubtest1\",\"Timestamp\":\"2022-04-09T03:20:06.591Z\", \"Value\": \"" + message + "\", \"Headers\": [{ \"Key\": \"test\", \"Value\": \"typescript\" }] }"; + context.res = { + // status: 200, /* Defaults to 200 */ + body: responseMessage + }; + +}; + +export default kafkaOutput; \ No newline at end of file diff --git a/samples/typescript/kafka-trigger/UsersTrigger/function.json b/samples/typescript/KafkaTrigger/function.confluent.json similarity index 83% rename from samples/typescript/kafka-trigger/UsersTrigger/function.json rename to samples/typescript/KafkaTrigger/function.confluent.json index 5f91101..6c9e60c 100644 --- a/samples/typescript/kafka-trigger/UsersTrigger/function.json +++ b/samples/typescript/KafkaTrigger/function.confluent.json @@ 
-4,7 +4,7 @@ "type": "kafkaTrigger", "direction": "in", "name": "event", - "topic": "users", + "topic": "topic", "brokerList": "%BrokerList%", "username": "%ConfluentCloudUserName%", "password": "%ConfluentCloudPassword%", @@ -14,5 +14,5 @@ "dataType": "string" } ], - "scriptFile": "../dist/UsersTrigger/index.js" + "scriptFile": "../dist/KafkaTrigger/index.js" } \ No newline at end of file diff --git a/samples/typescript/KafkaTrigger/function.eventhub.json b/samples/typescript/KafkaTrigger/function.eventhub.json new file mode 100644 index 0000000..d5ccac9 --- /dev/null +++ b/samples/typescript/KafkaTrigger/function.eventhub.json @@ -0,0 +1,18 @@ +{ + "bindings": [ + { + "type": "kafkaTrigger", + "direction": "in", + "name": "event", + "topic": "topic", + "brokerList": "%BrokerList%", + "username": "$ConnectionString", + "password": "EventHubConnectionString", + "consumerGroup" : "functions", + "protocol": "saslSsl", + "authenticationMode": "plain", + "dataType": "string" + } + ], + "scriptFile": "../dist/KafkaTrigger/index.js" + } \ No newline at end of file diff --git a/samples/typescript/kafka-trigger/UsersTrigger/index.ts b/samples/typescript/KafkaTrigger/index.ts similarity index 100% rename from samples/typescript/kafka-trigger/UsersTrigger/index.ts rename to samples/typescript/KafkaTrigger/index.ts diff --git a/samples/typescript/kafka-trigger/UsersTriggerMany/function.json b/samples/typescript/KafkaTriggerMany/function.confluent.json similarity index 83% rename from samples/typescript/kafka-trigger/UsersTriggerMany/function.json rename to samples/typescript/KafkaTriggerMany/function.confluent.json index 3cbf30f..90de1d9 100644 --- a/samples/typescript/kafka-trigger/UsersTriggerMany/function.json +++ b/samples/typescript/KafkaTriggerMany/function.confluent.json @@ -4,7 +4,7 @@ "type": "kafkaTrigger", "direction": "in", "name": "event", - "topic": "users", + "topic": "topic", "brokerList": "%BrokerList%", "username": "%ConfluentCloudUserName%", "password": 
"%ConfluentCloudPassword%", @@ -15,5 +15,5 @@ "dataType": "string" } ], - "scriptFile": "../dist/UsersTriggerMany/index.js" + "scriptFile": "../dist/KafkaTriggerMany/index.js" } \ No newline at end of file diff --git a/samples/typescript/KafkaTriggerMany/function.eventhub.json b/samples/typescript/KafkaTriggerMany/function.eventhub.json new file mode 100644 index 0000000..adb64a9 --- /dev/null +++ b/samples/typescript/KafkaTriggerMany/function.eventhub.json @@ -0,0 +1,19 @@ +{ + "bindings": [ + { + "type": "kafkaTrigger", + "direction": "in", + "name": "event", + "topic": "topic", + "brokerList": "%BrokerList%", + "username": "$ConnectionString", + "password": "EventHubConnectionString", + "consumerGroup" : "functions", + "protocol": "saslSsl", + "authenticationMode": "plain", + "cardinality": "MANY", + "dataType": "string" + } + ], + "scriptFile": "../dist/KafkaTriggerMany/index.js" +} \ No newline at end of file diff --git a/samples/typescript/kafka-trigger/UsersTriggerMany/index.ts b/samples/typescript/KafkaTriggerMany/index.ts similarity index 100% rename from samples/typescript/kafka-trigger/UsersTriggerMany/index.ts rename to samples/typescript/KafkaTriggerMany/index.ts diff --git a/samples/typescript/KafkaTriggerManyWithHeaders/function.confluent.json b/samples/typescript/KafkaTriggerManyWithHeaders/function.confluent.json new file mode 100644 index 0000000..fa25447 --- /dev/null +++ b/samples/typescript/KafkaTriggerManyWithHeaders/function.confluent.json @@ -0,0 +1,19 @@ +{ + "bindings": [ + { + "type": "kafkaTrigger", + "direction": "in", + "name": "event", + "brokerList": "BrokerList", + "topic": "topic", + "username" : "%ConfluentCloudUserName%", + "password": "ConfluentCloudPassword", + "consumerGroup" : "$Default", + "protocol": "saslSsl", + "authenticationMode": "plain", + "cardinality": "MANY", + "dataType": "string" + } + ], + "scriptFile": "../dist/KafkaTriggerManyWithHeaders/index.js" +} \ No newline at end of file diff --git 
a/samples/typescript/KafkaTriggerManyWithHeaders/function.eventhub.json b/samples/typescript/KafkaTriggerManyWithHeaders/function.eventhub.json new file mode 100644 index 0000000..46487a2 --- /dev/null +++ b/samples/typescript/KafkaTriggerManyWithHeaders/function.eventhub.json @@ -0,0 +1,19 @@ +{ + "bindings": [ + { + "type": "kafkaTrigger", + "direction": "in", + "name": "event", + "brokerList": "BrokerList", + "topic": "topic", + "username": "$ConnectionString", + "password": "EventHubConnectionString", + "consumerGroup" : "$Default", + "protocol": "saslSsl", + "authenticationMode": "plain", + "cardinality": "MANY", + "dataType": "string" + } + ], + "scriptFile": "../dist/KafkaTriggerManyWithHeaders/index.js" +} \ No newline at end of file diff --git a/samples/typescript/KafkaTriggerManyWithHeaders/index.ts b/samples/typescript/KafkaTriggerManyWithHeaders/index.ts new file mode 100644 index 0000000..432a42a --- /dev/null +++ b/samples/typescript/KafkaTriggerManyWithHeaders/index.ts @@ -0,0 +1,46 @@ +import { AzureFunction, Context } from "@azure/functions" + +// This is to describe the metadata of a Kafka event +class KafkaHeaders { + Key: string; + Value: string; +} + +class KafkaEvent { + Offset : number; + Partition : number; + Topic : string; + Timestamp : string; + Value : string; + Headers: KafkaHeaders[]; + + constructor(metadata:any) { + this.Offset = metadata.Offset; + this.Partition = metadata.Partition; + this.Topic = metadata.Topic; + this.Timestamp = metadata.Timestamp; + this.Value = metadata.Value; + this.Headers = metadata.Headers; + } + + public getValue() : T { + return JSON.parse(this.Value).payload; + } +} + +const kafkaTrigger: AzureFunction = async function (context: Context, event_str: string[]): Promise { + for(var event of event_str) { + let event_obj = new KafkaEvent(eval(JSON.parse(event))); + context.log("Event Offset: " + event_obj.Offset); + context.log("Event Partition: " + event_obj.Partition); + context.log("Event Topic: " + 
event_obj.Topic); + context.log("Event Timestamp: " + event_obj.Timestamp); + context.log("Event Value (as string): " + event_obj.Value); + context.log("Event Headers: "); + event_obj.Headers.forEach((header: KafkaHeaders) => { + context.log("Key: ", header.Key, "Value: ", atob(header.Value)) + }); + } +}; + +export default kafkaTrigger; \ No newline at end of file diff --git a/samples/typescript/KafkaTriggerWithHeaders/function.confluent.json b/samples/typescript/KafkaTriggerWithHeaders/function.confluent.json new file mode 100644 index 0000000..e91e3f9 --- /dev/null +++ b/samples/typescript/KafkaTriggerWithHeaders/function.confluent.json @@ -0,0 +1,18 @@ +{ + "bindings": [ + { + "type": "kafkaTrigger", + "direction": "in", + "name": "event", + "brokerList": "BrokerList", + "topic": "topic", + "username": "%ConfluentCloudUserName%", + "password": "ConfluentCloudPassword", + "consumerGroup" : "$Default", + "protocol": "saslSsl", + "authenticationMode": "plain", + "dataType": "string" + } + ], + "scriptFile": "../dist/KafkaTriggerWithHeaders/index.js" +} \ No newline at end of file diff --git a/samples/typescript/KafkaTriggerWithHeaders/function.eventhub.json b/samples/typescript/KafkaTriggerWithHeaders/function.eventhub.json new file mode 100644 index 0000000..158e93b --- /dev/null +++ b/samples/typescript/KafkaTriggerWithHeaders/function.eventhub.json @@ -0,0 +1,18 @@ +{ + "bindings": [ + { + "type": "kafkaTrigger", + "direction": "in", + "name": "event", + "brokerList": "BrokerList", + "topic": "topic", + "username": "$ConnectionString", + "password": "EventHubConnectionString", + "consumerGroup" : "$Default", + "protocol": "saslSsl", + "authenticationMode": "plain", + "dataType": "string" + } + ], + "scriptFile": "../dist/KafkaTriggerWithHeaders/index.js" +} \ No newline at end of file diff --git a/samples/typescript/KafkaTriggerWithHeaders/index.ts b/samples/typescript/KafkaTriggerWithHeaders/index.ts new file mode 100644 index 0000000..415117b --- /dev/null 
+++ b/samples/typescript/KafkaTriggerWithHeaders/index.ts @@ -0,0 +1,44 @@ +import { AzureFunction, Context } from "@azure/functions" + +class KafkaHeaders { + Key: string; + Value: string; +} + +// This is to describe the metadata of a Kafka event +class KafkaEvent { + Offset : number; + Partition : number; + Topic : string; + Timestamp : string; + Value : string; + Headers: KafkaHeaders[]; + + constructor(metadata:any) { + this.Offset = metadata.Offset; + this.Partition = metadata.Partition; + this.Topic = metadata.Topic; + this.Timestamp = metadata.Timestamp; + this.Value = metadata.Value; + this.Headers = metadata.Headers; + } + + public getValue() : T { + return JSON.parse(this.Value).payload; + } +} + +const kafkaTrigger: AzureFunction = async function (context: Context, event: string): Promise { + let event_obj = new KafkaEvent(eval(event)); + context.log("Event Offset: " + event_obj.Offset); + context.log("Event Partition: " + event_obj.Partition); + context.log("Event Topic: " + event_obj.Topic); + context.log("Event Timestamp: " + event_obj.Timestamp); + context.log("Event Value (as string): " + event_obj.Value); + context.log("Event Headers: "); + event_obj.Headers.forEach((header: KafkaHeaders) => { + context.log("Key: ", header.Key, "Value: ", atob(header.Value)) + }); +}; + +export default kafkaTrigger; \ No newline at end of file diff --git a/samples/typescript/kafka-trigger/NuGet.config b/samples/typescript/NuGet.config similarity index 100% rename from samples/typescript/kafka-trigger/NuGet.config rename to samples/typescript/NuGet.config diff --git a/samples/typescript/kafka-trigger/README.md b/samples/typescript/README.md similarity index 91% rename from samples/typescript/kafka-trigger/README.md rename to samples/typescript/README.md index bade829..d9bb5b0 100644 --- a/samples/typescript/kafka-trigger/README.md +++ b/samples/typescript/README.md @@ -8,8 +8,8 @@ We provide two samples. 
| Name | Description | Kafka Cluster| Enabled | | ----- | --------------- | -------| ---| -| UsersTrigger | Simple Kafka trigger sample | local | yes | -| UsersTriggerMany | Kafka batch processing sample | local | yes | +| KafkaTrigger | Simple Kafka trigger sample | local | yes | +| KafkaTriggerMany | Kafka batch processing sample | local | yes | ## Difference between Javascript and TypeScript diff --git a/samples/typescript/kafka-trigger/extensions.csproj b/samples/typescript/extensions.csproj similarity index 95% rename from samples/typescript/kafka-trigger/extensions.csproj rename to samples/typescript/extensions.csproj index 7a5e204..38c92ff 100644 --- a/samples/typescript/kafka-trigger/extensions.csproj +++ b/samples/typescript/extensions.csproj @@ -5,7 +5,7 @@ ** - + diff --git a/samples/typescript/kafka-trigger/host.json b/samples/typescript/host.json similarity index 100% rename from samples/typescript/kafka-trigger/host.json rename to samples/typescript/host.json diff --git a/samples/typescript/kafka-trigger/local.settings.json.example b/samples/typescript/kafka-trigger/local.settings.json.example deleted file mode 100644 index a44e474..0000000 --- a/samples/typescript/kafka-trigger/local.settings.json.example +++ /dev/null @@ -1,10 +0,0 @@ -{ - "IsEncrypted": false, - "Values": { - "BrokerList": "{YOUR_CONFLUENT_CLOUD_BROKER}.eastus.azure.confluent.cloud:9092", - "ConfluentCloudUserName": "{YOUR_CONFLUENT_CLOUD_USERNAME}", - "ConfluentCloudPassword": "{YOUR_CONFLUENT_CLOUD_PASSWORD}", - "FUNCTIONS_WORKER_RUNTIME": "node", - "AzureWebJobsStorage": "" - } -} \ No newline at end of file diff --git a/samples/typescript/kafka-trigger/package-lock.json b/samples/typescript/kafka-trigger/package-lock.json deleted file mode 100644 index 4fa1715..0000000 --- a/samples/typescript/kafka-trigger/package-lock.json +++ /dev/null @@ -1,526 +0,0 @@ -{ - "name": "function-users-kafka-trigger", - "version": "0.1.0", - "lockfileVersion": 1, - "requires": true, - 
"dependencies": { - "@azure/functions": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/@azure/functions/-/functions-1.2.0.tgz", - "integrity": "sha512-qkaQqTnr56xUnYNkKBM/2wsnf6imAJ3NF6Nbpk691Y6JYliA1YdZngsZsrpHS9tQ9/71MqARl8m50+EmEfLG3g==", - "dev": true - }, - "@types/node": { - "version": "12.12.31", - "resolved": "https://registry.npmjs.org/@types/node/-/node-12.12.31.tgz", - "integrity": "sha512-T+wnJno8uh27G9c+1T+a1/WYCHzLeDqtsGJkoEdSp2X8RTh3oOCZQcUnjAx90CS8cmmADX51O0FI/tu9s0yssg==" - }, - "ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dev": true, - "requires": { - "color-convert": "^1.9.0" - } - }, - "balanced-match": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", - "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", - "dev": true - }, - "brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, - "requires": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "dev": true, - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - }, - "color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dev": true, - "requires": { - 
"color-name": "1.1.3" - } - }, - "color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", - "dev": true - }, - "concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", - "dev": true - }, - "cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", - "dev": true, - "requires": { - "nice-try": "^1.0.4", - "path-key": "^2.0.1", - "semver": "^5.5.0", - "shebang-command": "^1.2.0", - "which": "^1.2.9" - } - }, - "define-properties": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", - "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==", - "dev": true, - "requires": { - "object-keys": "^1.0.12" - } - }, - "error-ex": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", - "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", - "dev": true, - "requires": { - "is-arrayish": "^0.2.1" - } - }, - "es-abstract": { - "version": "1.17.5", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.5.tgz", - "integrity": "sha512-BR9auzDbySxOcfog0tLECW8l28eRGpDpU3Dm3Hp4q/N+VtLTmyj4EUN088XZWQDW/hzj6sYRDXeOFsaAODKvpg==", - "dev": true, - "requires": { - "es-to-primitive": "^1.2.1", - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-symbols": "^1.0.1", - "is-callable": "^1.1.5", - "is-regex": "^1.0.5", - "object-inspect": "^1.7.0", - "object-keys": "^1.1.1", - "object.assign": "^4.1.0", - "string.prototype.trimleft": "^2.1.1", - 
"string.prototype.trimright": "^2.1.1" - } - }, - "es-to-primitive": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", - "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", - "dev": true, - "requires": { - "is-callable": "^1.1.4", - "is-date-object": "^1.0.1", - "is-symbol": "^1.0.2" - } - }, - "escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", - "dev": true - }, - "function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", - "dev": true - }, - "graceful-fs": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz", - "integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==", - "dev": true - }, - "has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "dev": true, - "requires": { - "function-bind": "^1.1.1" - } - }, - "has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", - "dev": true - }, - "has-symbols": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz", - "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==", - "dev": true - }, - "hosted-git-info": { - "version": "2.8.9", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", - 
"integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", - "dev": true - }, - "is-arrayish": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", - "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", - "dev": true - }, - "is-callable": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz", - "integrity": "sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==", - "dev": true - }, - "is-date-object": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.2.tgz", - "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==", - "dev": true - }, - "is-regex": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz", - "integrity": "sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ==", - "dev": true, - "requires": { - "has": "^1.0.3" - } - }, - "is-symbol": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.3.tgz", - "integrity": "sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==", - "dev": true, - "requires": { - "has-symbols": "^1.0.1" - } - }, - "isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", - "dev": true - }, - "json-parse-better-errors": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", - "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==", - "dev": true - }, - "load-json-file": { - "version": "4.0.0", - "resolved": 
"https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", - "integrity": "sha1-L19Fq5HjMhYjT9U62rZo607AmTs=", - "dev": true, - "requires": { - "graceful-fs": "^4.1.2", - "parse-json": "^4.0.0", - "pify": "^3.0.0", - "strip-bom": "^3.0.0" - } - }, - "memorystream": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/memorystream/-/memorystream-0.3.1.tgz", - "integrity": "sha1-htcJCzDORV1j+64S3aUaR93K+bI=", - "dev": true - }, - "minimatch": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", - "dev": true, - "requires": { - "brace-expansion": "^1.1.7" - } - }, - "nice-try": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", - "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", - "dev": true - }, - "normalize-package-data": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", - "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", - "dev": true, - "requires": { - "hosted-git-info": "^2.1.4", - "resolve": "^1.10.0", - "semver": "2 || 3 || 4 || 5", - "validate-npm-package-license": "^3.0.1" - } - }, - "npm-run-all": { - "version": "4.1.5", - "resolved": "https://registry.npmjs.org/npm-run-all/-/npm-run-all-4.1.5.tgz", - "integrity": "sha512-Oo82gJDAVcaMdi3nuoKFavkIHBRVqQ1qvMb+9LHk/cF4P6B2m8aP04hGf7oL6wZ9BuGwX1onlLhpuoofSyoQDQ==", - "dev": true, - "requires": { - "ansi-styles": "^3.2.1", - "chalk": "^2.4.1", - "cross-spawn": "^6.0.5", - "memorystream": "^0.3.1", - "minimatch": "^3.0.4", - "pidtree": "^0.3.0", - "read-pkg": "^3.0.0", - "shell-quote": "^1.6.1", - "string.prototype.padend": "^3.0.0" - } - }, - "object-inspect": { - "version": "1.7.0", - 
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz", - "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==", - "dev": true - }, - "object-keys": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", - "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", - "dev": true - }, - "object.assign": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.0.tgz", - "integrity": "sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w==", - "dev": true, - "requires": { - "define-properties": "^1.1.2", - "function-bind": "^1.1.1", - "has-symbols": "^1.0.0", - "object-keys": "^1.0.11" - } - }, - "parse-json": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", - "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", - "dev": true, - "requires": { - "error-ex": "^1.3.1", - "json-parse-better-errors": "^1.0.1" - } - }, - "path-key": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=", - "dev": true - }, - "path-parse": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", - "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", - "dev": true - }, - "path-type": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", - "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", - "dev": true, - "requires": { - "pify": "^3.0.0" - } - }, - "pidtree": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/pidtree/-/pidtree-0.3.0.tgz", - "integrity": 
"sha512-9CT4NFlDcosssyg8KVFltgokyKZIFjoBxw8CTGy+5F38Y1eQWrt8tRayiUOXE+zVKQnYu5BR8JjCtvK3BcnBhg==", - "dev": true - }, - "pify": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", - "dev": true - }, - "read-pkg": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", - "integrity": "sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k=", - "dev": true, - "requires": { - "load-json-file": "^4.0.0", - "normalize-package-data": "^2.3.2", - "path-type": "^3.0.0" - } - }, - "resolve": { - "version": "1.15.1", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.15.1.tgz", - "integrity": "sha512-84oo6ZTtoTUpjgNEr5SJyzQhzL72gaRodsSfyxC/AXRvwu0Yse9H8eF9IpGo7b8YetZhlI6v7ZQ6bKBFV/6S7w==", - "dev": true, - "requires": { - "path-parse": "^1.0.6" - } - }, - "safe-buffer": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.0.tgz", - "integrity": "sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg==" - }, - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "dev": true - }, - "shebang-command": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", - "dev": true, - "requires": { - "shebang-regex": "^1.0.0" - } - }, - "shebang-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", - "dev": true - }, - "shell-quote": { - "version": "1.7.2", - "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.7.2.tgz", - "integrity": 
"sha512-mRz/m/JVscCrkMyPqHc/bczi3OQHkLTqXHEFu0zDhK/qfv3UcOA4SVmRCLmos4bhjr9ekVQubj/R7waKapmiQg==", - "dev": true - }, - "spdx-correct": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.0.tgz", - "integrity": "sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q==", - "dev": true, - "requires": { - "spdx-expression-parse": "^3.0.0", - "spdx-license-ids": "^3.0.0" - } - }, - "spdx-exceptions": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz", - "integrity": "sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA==", - "dev": true - }, - "spdx-expression-parse": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz", - "integrity": "sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg==", - "dev": true, - "requires": { - "spdx-exceptions": "^2.1.0", - "spdx-license-ids": "^3.0.0" - } - }, - "spdx-license-ids": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz", - "integrity": "sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q==", - "dev": true - }, - "string.prototype.padend": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/string.prototype.padend/-/string.prototype.padend-3.1.0.tgz", - "integrity": "sha512-3aIv8Ffdp8EZj8iLwREGpQaUZiPyrWrpzMBHvkiSW/bK/EGve9np07Vwy7IJ5waydpGXzQZu/F8Oze2/IWkBaA==", - "dev": true, - "requires": { - "define-properties": "^1.1.3", - "es-abstract": "^1.17.0-next.1" - } - }, - "string.prototype.trimleft": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.1.tgz", - "integrity": 
"sha512-iu2AGd3PuP5Rp7x2kEZCrB2Nf41ehzh+goo8TV7z8/XDBbsvc6HQIlUl9RjkZ4oyrW1XM5UwlGl1oVEaDjg6Ag==", - "dev": true, - "requires": { - "define-properties": "^1.1.3", - "function-bind": "^1.1.1" - } - }, - "string.prototype.trimright": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.1.tgz", - "integrity": "sha512-qFvWL3/+QIgZXVmJBfpHmxLB7xsUXz6HsUmP8+5dRaC3Q7oKUv9Vo6aMCRZC1smrtyECFsIT30PqBJ1gTjAs+g==", - "dev": true, - "requires": { - "define-properties": "^1.1.3", - "function-bind": "^1.1.1" - } - }, - "string_decoder": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "requires": { - "safe-buffer": "~5.2.0" - } - }, - "strip-bom": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", - "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=", - "dev": true - }, - "supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dev": true, - "requires": { - "has-flag": "^3.0.0" - } - }, - "typecript": { - "version": "0.0.1-security", - "resolved": "https://registry.npmjs.org/typecript/-/typecript-0.0.1-security.tgz", - "integrity": "sha512-88WUWdO0zl/cC+X44m7TJkNssn0sTwZOSAyZu7ZmY6BfGJ4idVWJGjFmd6wEjS3yhWUYrAszzNMHZMIJ7h3ycg==" - }, - "typescript": { - "version": "3.8.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.8.3.tgz", - "integrity": "sha512-MYlEfn5VrLNsgudQTVJeNaQFUAI7DkhnOjdpAp4T+ku1TfQClewlbSuTVHiA+8skNBgaf02TL/kLOvig4y3G8w==", - "dev": true - }, - "validate-npm-package-license": { - "version": "3.0.4", - "resolved": 
"https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", - "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", - "dev": true, - "requires": { - "spdx-correct": "^3.0.0", - "spdx-expression-parse": "^3.0.0" - } - }, - "which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", - "dev": true, - "requires": { - "isexe": "^2.0.0" - } - } - } -} diff --git a/samples/typescript/local.settings.json.eventhub.example b/samples/typescript/local.settings.json.eventhub.example new file mode 100644 index 0000000..03ef301 --- /dev/null +++ b/samples/typescript/local.settings.json.eventhub.example @@ -0,0 +1,10 @@ +{ + "IsEncrypted": false, + "Values": { + "AzureWebJobsStorage": "UseDevelopmentStorage=true", + "FUNCTIONS_WORKER_RUNTIME": "node", + "BrokerList": ".servicebus.windows.net:9093", + "EventHubConnectionString": "" + "topic": "{YOUR_KAFKA_TOPIC_NAME}" + } +} \ No newline at end of file diff --git a/samples/typescript/local.settings.json.example b/samples/typescript/local.settings.json.example new file mode 100644 index 0000000..ce0f405 --- /dev/null +++ b/samples/typescript/local.settings.json.example @@ -0,0 +1,11 @@ +{ + "IsEncrypted": false, + "Values": { + "AzureWebJobsStorage": "UseDevelopmentStorage=true", + "FUNCTIONS_WORKER_RUNTIME": "python", + "BrokerList": "{YOUR_CONFLUENT_CLOUD_BROKER}", + "ConfluentCloudUserName": "{YOUR_CONFLUENT_CLOUD_USERNAME}", + "ConfluentCloudPassword": "{YOUR_CONFLUENT_CLOUD_PASSWORD}", + "topic": "{YOUR_KAFKA_TOPIC_NAME}" + } +} \ No newline at end of file diff --git a/samples/typescript/package-lock.json b/samples/typescript/package-lock.json new file mode 100644 index 0000000..29b464d --- /dev/null +++ b/samples/typescript/package-lock.json @@ -0,0 +1,1216 @@ +{ + "name": 
"function-users-kafka-trigger", + "version": "0.1.0", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "name": "function-users-kafka-trigger", + "version": "0.1.0", + "dependencies": { + "@types/node": "^12.12.31", + "string_decoder": "^1.3.0", + "typecript": "0.0.1-security" + }, + "devDependencies": { + "@azure/functions": "^1.2.0", + "npm-run-all": "^4.1.5", + "typescript": "^3.3.3" + } + }, + "node_modules/@azure/functions": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@azure/functions/-/functions-1.2.0.tgz", + "integrity": "sha512-qkaQqTnr56xUnYNkKBM/2wsnf6imAJ3NF6Nbpk691Y6JYliA1YdZngsZsrpHS9tQ9/71MqARl8m50+EmEfLG3g==", + "dev": true + }, + "node_modules/@types/node": { + "version": "12.12.31", + "resolved": "https://registry.npmjs.org/@types/node/-/node-12.12.31.tgz", + "integrity": "sha512-T+wnJno8uh27G9c+1T+a1/WYCHzLeDqtsGJkoEdSp2X8RTh3oOCZQcUnjAx90CS8cmmADX51O0FI/tu9s0yssg==" + }, + "node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/balanced-match": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", + "dev": true + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + 
"integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", + "dev": true + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "dev": true + }, + "node_modules/cross-spawn": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", + "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "dev": true, + "dependencies": { + "nice-try": "^1.0.4", + "path-key": "^2.0.1", + "semver": "^5.5.0", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + }, + "engines": { + "node": ">=4.8" + } + }, + "node_modules/define-properties": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", + "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==", + "dev": true, + "dependencies": { + "object-keys": "^1.0.12" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": 
"sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "dependencies": { + "is-arrayish": "^0.2.1" + } + }, + "node_modules/es-abstract": { + "version": "1.17.5", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.5.tgz", + "integrity": "sha512-BR9auzDbySxOcfog0tLECW8l28eRGpDpU3Dm3Hp4q/N+VtLTmyj4EUN088XZWQDW/hzj6sYRDXeOFsaAODKvpg==", + "dev": true, + "dependencies": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.1.5", + "is-regex": "^1.0.5", + "object-inspect": "^1.7.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.0", + "string.prototype.trimleft": "^2.1.1", + "string.prototype.trimright": "^2.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/es-to-primitive": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "dev": true, + "dependencies": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "node_modules/graceful-fs": { + "version": "4.2.3", + "resolved": 
"https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz", + "integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==", + "dev": true + }, + "node_modules/has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.1" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/has-symbols": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz", + "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hosted-git-info": { + "version": "2.8.9", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", + "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", + "dev": true + }, + "node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", + "dev": true + }, + "node_modules/is-callable": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz", + "integrity": "sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + 
"node_modules/is-date-object": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.2.tgz", + "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-regex": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz", + "integrity": "sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ==", + "dev": true, + "dependencies": { + "has": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-symbol": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.3.tgz", + "integrity": "sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==", + "dev": true, + "dependencies": { + "has-symbols": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", + "dev": true + }, + "node_modules/json-parse-better-errors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", + "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==", + "dev": true + }, + "node_modules/load-json-file": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", + "integrity": "sha1-L19Fq5HjMhYjT9U62rZo607AmTs=", + "dev": true, + "dependencies": { + "graceful-fs": "^4.1.2", + "parse-json": "^4.0.0", + "pify": "^3.0.0", + 
"strip-bom": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/memorystream": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/memorystream/-/memorystream-0.3.1.tgz", + "integrity": "sha1-htcJCzDORV1j+64S3aUaR93K+bI=", + "dev": true, + "engines": { + "node": ">= 0.10.0" + } + }, + "node_modules/minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/nice-try": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", + "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", + "dev": true + }, + "node_modules/normalize-package-data": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", + "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", + "dev": true, + "dependencies": { + "hosted-git-info": "^2.1.4", + "resolve": "^1.10.0", + "semver": "2 || 3 || 4 || 5", + "validate-npm-package-license": "^3.0.1" + } + }, + "node_modules/npm-run-all": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/npm-run-all/-/npm-run-all-4.1.5.tgz", + "integrity": "sha512-Oo82gJDAVcaMdi3nuoKFavkIHBRVqQ1qvMb+9LHk/cF4P6B2m8aP04hGf7oL6wZ9BuGwX1onlLhpuoofSyoQDQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.1", + "chalk": "^2.4.1", + "cross-spawn": "^6.0.5", + "memorystream": "^0.3.1", + "minimatch": "^3.0.4", + "pidtree": "^0.3.0", + "read-pkg": "^3.0.0", + "shell-quote": "^1.6.1", + "string.prototype.padend": "^3.0.0" + }, + "bin": { + "npm-run-all": "bin/npm-run-all/index.js", + "run-p": "bin/run-p/index.js", + 
"run-s": "bin/run-s/index.js" + }, + "engines": { + "node": ">= 4" + } + }, + "node_modules/object-inspect": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz", + "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object.assign": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.0.tgz", + "integrity": "sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w==", + "dev": true, + "dependencies": { + "define-properties": "^1.1.2", + "function-bind": "^1.1.1", + "has-symbols": "^1.0.0", + "object-keys": "^1.0.11" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/parse-json": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", + "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", + "dev": true, + "dependencies": { + "error-ex": "^1.3.1", + "json-parse-better-errors": "^1.0.1" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/path-key": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", + "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, 
+ "node_modules/path-type": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", + "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", + "dev": true, + "dependencies": { + "pify": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/pidtree": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/pidtree/-/pidtree-0.3.0.tgz", + "integrity": "sha512-9CT4NFlDcosssyg8KVFltgokyKZIFjoBxw8CTGy+5F38Y1eQWrt8tRayiUOXE+zVKQnYu5BR8JjCtvK3BcnBhg==", + "dev": true, + "bin": { + "pidtree": "bin/pidtree.js" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/read-pkg": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", + "integrity": "sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k=", + "dev": true, + "dependencies": { + "load-json-file": "^4.0.0", + "normalize-package-data": "^2.3.2", + "path-type": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/resolve": { + "version": "1.15.1", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.15.1.tgz", + "integrity": "sha512-84oo6ZTtoTUpjgNEr5SJyzQhzL72gaRodsSfyxC/AXRvwu0Yse9H8eF9IpGo7b8YetZhlI6v7ZQ6bKBFV/6S7w==", + "dev": true, + "dependencies": { + "path-parse": "^1.0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.0.tgz", + "integrity": "sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg==" + }, + "node_modules/semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + 
"integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true, + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/shebang-command": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", + "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", + "dev": true, + "dependencies": { + "shebang-regex": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/shebang-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", + "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/shell-quote": { + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.7.2.tgz", + "integrity": "sha512-mRz/m/JVscCrkMyPqHc/bczi3OQHkLTqXHEFu0zDhK/qfv3UcOA4SVmRCLmos4bhjr9ekVQubj/R7waKapmiQg==", + "dev": true + }, + "node_modules/spdx-correct": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.0.tgz", + "integrity": "sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q==", + "dev": true, + "dependencies": { + "spdx-expression-parse": "^3.0.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-exceptions": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz", + "integrity": "sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA==", + "dev": true + }, + "node_modules/spdx-expression-parse": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz", + "integrity": "sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg==", + "dev": true, + "dependencies": { + "spdx-exceptions": "^2.1.0", + 
"spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-license-ids": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz", + "integrity": "sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q==", + "dev": true + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/string.prototype.padend": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string.prototype.padend/-/string.prototype.padend-3.1.0.tgz", + "integrity": "sha512-3aIv8Ffdp8EZj8iLwREGpQaUZiPyrWrpzMBHvkiSW/bK/EGve9np07Vwy7IJ5waydpGXzQZu/F8Oze2/IWkBaA==", + "dev": true, + "dependencies": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trimleft": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.1.tgz", + "integrity": "sha512-iu2AGd3PuP5Rp7x2kEZCrB2Nf41ehzh+goo8TV7z8/XDBbsvc6HQIlUl9RjkZ4oyrW1XM5UwlGl1oVEaDjg6Ag==", + "dev": true, + "dependencies": { + "define-properties": "^1.1.3", + "function-bind": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trimright": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.1.tgz", + "integrity": "sha512-qFvWL3/+QIgZXVmJBfpHmxLB7xsUXz6HsUmP8+5dRaC3Q7oKUv9Vo6aMCRZC1smrtyECFsIT30PqBJ1gTjAs+g==", + "dev": true, + "dependencies": { + "define-properties": "^1.1.3", + 
"function-bind": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/strip-bom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", + "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/typecript": { + "version": "0.0.1-security", + "resolved": "https://registry.npmjs.org/typecript/-/typecript-0.0.1-security.tgz", + "integrity": "sha512-88WUWdO0zl/cC+X44m7TJkNssn0sTwZOSAyZu7ZmY6BfGJ4idVWJGjFmd6wEjS3yhWUYrAszzNMHZMIJ7h3ycg==" + }, + "node_modules/typescript": { + "version": "3.8.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.8.3.tgz", + "integrity": "sha512-MYlEfn5VrLNsgudQTVJeNaQFUAI7DkhnOjdpAp4T+ku1TfQClewlbSuTVHiA+8skNBgaf02TL/kLOvig4y3G8w==", + "dev": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=4.2.0" + } + }, + "node_modules/validate-npm-package-license": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", + "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "dev": true, + "dependencies": { + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0" + } + }, + "node_modules/which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": 
"sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "which": "bin/which" + } + } + }, + "dependencies": { + "@azure/functions": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@azure/functions/-/functions-1.2.0.tgz", + "integrity": "sha512-qkaQqTnr56xUnYNkKBM/2wsnf6imAJ3NF6Nbpk691Y6JYliA1YdZngsZsrpHS9tQ9/71MqARl8m50+EmEfLG3g==", + "dev": true + }, + "@types/node": { + "version": "12.12.31", + "resolved": "https://registry.npmjs.org/@types/node/-/node-12.12.31.tgz", + "integrity": "sha512-T+wnJno8uh27G9c+1T+a1/WYCHzLeDqtsGJkoEdSp2X8RTh3oOCZQcUnjAx90CS8cmmADX51O0FI/tu9s0yssg==" + }, + "ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "requires": { + "color-convert": "^1.9.0" + } + }, + "balanced-match": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", + "dev": true + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + }, + "color-convert": { + "version": "1.9.3", + "resolved": 
"https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "requires": { + "color-name": "1.1.3" + } + }, + "color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", + "dev": true + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "dev": true + }, + "cross-spawn": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", + "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "dev": true, + "requires": { + "nice-try": "^1.0.4", + "path-key": "^2.0.1", + "semver": "^5.5.0", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + } + }, + "define-properties": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", + "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==", + "dev": true, + "requires": { + "object-keys": "^1.0.12" + } + }, + "error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "requires": { + "is-arrayish": "^0.2.1" + } + }, + "es-abstract": { + "version": "1.17.5", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.5.tgz", + "integrity": "sha512-BR9auzDbySxOcfog0tLECW8l28eRGpDpU3Dm3Hp4q/N+VtLTmyj4EUN088XZWQDW/hzj6sYRDXeOFsaAODKvpg==", + "dev": true, + "requires": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": 
"^1.0.1", + "is-callable": "^1.1.5", + "is-regex": "^1.0.5", + "object-inspect": "^1.7.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.0", + "string.prototype.trimleft": "^2.1.1", + "string.prototype.trimright": "^2.1.1" + } + }, + "es-to-primitive": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "dev": true, + "requires": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + } + }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + "dev": true + }, + "function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "graceful-fs": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz", + "integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==", + "dev": true + }, + "has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "requires": { + "function-bind": "^1.1.1" + } + }, + "has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "dev": true + }, + "has-symbols": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz", + "integrity": 
"sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==", + "dev": true + }, + "hosted-git-info": { + "version": "2.8.9", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", + "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", + "dev": true + }, + "is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", + "dev": true + }, + "is-callable": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz", + "integrity": "sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==", + "dev": true + }, + "is-date-object": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.2.tgz", + "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==", + "dev": true + }, + "is-regex": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz", + "integrity": "sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ==", + "dev": true, + "requires": { + "has": "^1.0.3" + } + }, + "is-symbol": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.3.tgz", + "integrity": "sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==", + "dev": true, + "requires": { + "has-symbols": "^1.0.1" + } + }, + "isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", + "dev": true + }, + "json-parse-better-errors": { + "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", + "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==", + "dev": true + }, + "load-json-file": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", + "integrity": "sha1-L19Fq5HjMhYjT9U62rZo607AmTs=", + "dev": true, + "requires": { + "graceful-fs": "^4.1.2", + "parse-json": "^4.0.0", + "pify": "^3.0.0", + "strip-bom": "^3.0.0" + } + }, + "memorystream": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/memorystream/-/memorystream-0.3.1.tgz", + "integrity": "sha1-htcJCzDORV1j+64S3aUaR93K+bI=", + "dev": true + }, + "minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "nice-try": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", + "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", + "dev": true + }, + "normalize-package-data": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", + "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", + "dev": true, + "requires": { + "hosted-git-info": "^2.1.4", + "resolve": "^1.10.0", + "semver": "2 || 3 || 4 || 5", + "validate-npm-package-license": "^3.0.1" + } + }, + "npm-run-all": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/npm-run-all/-/npm-run-all-4.1.5.tgz", + "integrity": "sha512-Oo82gJDAVcaMdi3nuoKFavkIHBRVqQ1qvMb+9LHk/cF4P6B2m8aP04hGf7oL6wZ9BuGwX1onlLhpuoofSyoQDQ==", + "dev": true, + "requires": { + 
"ansi-styles": "^3.2.1", + "chalk": "^2.4.1", + "cross-spawn": "^6.0.5", + "memorystream": "^0.3.1", + "minimatch": "^3.0.4", + "pidtree": "^0.3.0", + "read-pkg": "^3.0.0", + "shell-quote": "^1.6.1", + "string.prototype.padend": "^3.0.0" + } + }, + "object-inspect": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz", + "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==", + "dev": true + }, + "object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true + }, + "object.assign": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.0.tgz", + "integrity": "sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w==", + "dev": true, + "requires": { + "define-properties": "^1.1.2", + "function-bind": "^1.1.1", + "has-symbols": "^1.0.0", + "object-keys": "^1.0.11" + } + }, + "parse-json": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", + "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", + "dev": true, + "requires": { + "error-ex": "^1.3.1", + "json-parse-better-errors": "^1.0.1" + } + }, + "path-key": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", + "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=", + "dev": true + }, + "path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, + "path-type": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", + "integrity": 
"sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", + "dev": true, + "requires": { + "pify": "^3.0.0" + } + }, + "pidtree": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/pidtree/-/pidtree-0.3.0.tgz", + "integrity": "sha512-9CT4NFlDcosssyg8KVFltgokyKZIFjoBxw8CTGy+5F38Y1eQWrt8tRayiUOXE+zVKQnYu5BR8JjCtvK3BcnBhg==", + "dev": true + }, + "pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", + "dev": true + }, + "read-pkg": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", + "integrity": "sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k=", + "dev": true, + "requires": { + "load-json-file": "^4.0.0", + "normalize-package-data": "^2.3.2", + "path-type": "^3.0.0" + } + }, + "resolve": { + "version": "1.15.1", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.15.1.tgz", + "integrity": "sha512-84oo6ZTtoTUpjgNEr5SJyzQhzL72gaRodsSfyxC/AXRvwu0Yse9H8eF9IpGo7b8YetZhlI6v7ZQ6bKBFV/6S7w==", + "dev": true, + "requires": { + "path-parse": "^1.0.6" + } + }, + "safe-buffer": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.0.tgz", + "integrity": "sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg==" + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + }, + "shebang-command": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", + "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", + "dev": true, + "requires": { + "shebang-regex": "^1.0.0" + } + }, + "shebang-regex": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", + "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", + "dev": true + }, + "shell-quote": { + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.7.2.tgz", + "integrity": "sha512-mRz/m/JVscCrkMyPqHc/bczi3OQHkLTqXHEFu0zDhK/qfv3UcOA4SVmRCLmos4bhjr9ekVQubj/R7waKapmiQg==", + "dev": true + }, + "spdx-correct": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.0.tgz", + "integrity": "sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q==", + "dev": true, + "requires": { + "spdx-expression-parse": "^3.0.0", + "spdx-license-ids": "^3.0.0" + } + }, + "spdx-exceptions": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz", + "integrity": "sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA==", + "dev": true + }, + "spdx-expression-parse": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz", + "integrity": "sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg==", + "dev": true, + "requires": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "spdx-license-ids": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz", + "integrity": "sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q==", + "dev": true + }, + "string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "requires": { + "safe-buffer": "~5.2.0" + } + }, + "string.prototype.padend": { + "version": "3.1.0", + 
"resolved": "https://registry.npmjs.org/string.prototype.padend/-/string.prototype.padend-3.1.0.tgz", + "integrity": "sha512-3aIv8Ffdp8EZj8iLwREGpQaUZiPyrWrpzMBHvkiSW/bK/EGve9np07Vwy7IJ5waydpGXzQZu/F8Oze2/IWkBaA==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1" + } + }, + "string.prototype.trimleft": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.1.tgz", + "integrity": "sha512-iu2AGd3PuP5Rp7x2kEZCrB2Nf41ehzh+goo8TV7z8/XDBbsvc6HQIlUl9RjkZ4oyrW1XM5UwlGl1oVEaDjg6Ag==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "function-bind": "^1.1.1" + } + }, + "string.prototype.trimright": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.1.tgz", + "integrity": "sha512-qFvWL3/+QIgZXVmJBfpHmxLB7xsUXz6HsUmP8+5dRaC3Q7oKUv9Vo6aMCRZC1smrtyECFsIT30PqBJ1gTjAs+g==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "function-bind": "^1.1.1" + } + }, + "strip-bom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", + "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=", + "dev": true + }, + "supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "requires": { + "has-flag": "^3.0.0" + } + }, + "typecript": { + "version": "0.0.1-security", + "resolved": "https://registry.npmjs.org/typecript/-/typecript-0.0.1-security.tgz", + "integrity": "sha512-88WUWdO0zl/cC+X44m7TJkNssn0sTwZOSAyZu7ZmY6BfGJ4idVWJGjFmd6wEjS3yhWUYrAszzNMHZMIJ7h3ycg==" + }, + "typescript": { + "version": "3.8.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.8.3.tgz", + "integrity": 
"sha512-MYlEfn5VrLNsgudQTVJeNaQFUAI7DkhnOjdpAp4T+ku1TfQClewlbSuTVHiA+8skNBgaf02TL/kLOvig4y3G8w==", + "dev": true + }, + "validate-npm-package-license": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", + "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "dev": true, + "requires": { + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0" + } + }, + "which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "dev": true, + "requires": { + "isexe": "^2.0.0" + } + } + } +} diff --git a/samples/typescript/kafka-trigger/package.json b/samples/typescript/package.json similarity index 100% rename from samples/typescript/kafka-trigger/package.json rename to samples/typescript/package.json diff --git a/samples/typescript/kafka-trigger/tsconfig.json b/samples/typescript/tsconfig.json similarity index 100% rename from samples/typescript/kafka-trigger/tsconfig.json rename to samples/typescript/tsconfig.json