CI/CD - move to github actions (#31)

- Moved to GitHub Actions for CI/CD
- Added end-to-end testing

Parent: f214df50bf
Commit: 9f5bb1b2e9
@@ -0,0 +1,100 @@
name: build

on:
  push:
    # Sequence of patterns matched against refs/tags
    branches:
      - '*'

jobs:
  build:
    name: Build gem
    runs-on: ubuntu-latest
    strategy:
      matrix:
        logstash-version: ['6.7', '6.8', '7.0', '7.x', 'default']
    env:
      LOGSTASH_SOURCE: 1
      LOGSTASH_PATH: ../logstash
    steps:
      - if: matrix.logstash-version != 'default'
        name: Set up JDK 1.8
        uses: actions/setup-java@v1
        with:
          java-version: 1.8
      - uses: ruby/setup-ruby@v1
        with:
          ruby-version: jruby
          bundler-cache: true
      - if: matrix.logstash-version != 'default'
        name: Checkout logstash ${{ matrix.logstash-version }}
        uses: actions/checkout@v2
        with:
          repository: "elastic/logstash"
          ref: ${{ matrix.logstash-version }}
          path: logstash
      - if: matrix.logstash-version != 'default'
        name: Build logstash
        run: ./gradlew assemble
        working-directory: logstash
      - name: Checkout code
        uses: actions/checkout@v2
        with:
          path: 'kusto'
      - run: bundle install
        working-directory: 'kusto'
      - name: Test
        run: bundle exec rake spec_junit
        working-directory: 'kusto'
      - run: gem build *.gemspec
        working-directory: 'kusto'
      - run: mv *.gem logstash-kusto.gem
        working-directory: 'kusto'
      - if: matrix.logstash-version == 'default'
        name: Upload gem
        uses: actions/upload-artifact@v2
        with:
          name: logstash-kusto.gem
          path: 'kusto/logstash-kusto.gem'
      - if: matrix.logstash-version == 'default'
        name: Publish Unit Test Results
        uses: EnricoMi/publish-unit-test-result-action@v1.6
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          files: kusto/rspec.xml
  e2e:
    name: End-To-End Testing
    runs-on: ubuntu-latest
    needs: build
    steps:
      - uses: ruby/setup-ruby@v1
        with:
          ruby-version: jruby
          bundler-cache: true
      - name: Checkout code
        uses: actions/checkout@v2
      - name: Download gem
        uses: actions/download-artifact@v2
        with:
          name: logstash-kusto.gem
      - name: Install logstash # taken from logstash's website https://www.elastic.co/guide/en/logstash/7.10/installing-logstash.html#_apt
        run: |
          wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | sudo apt-key add -
          sudo apt-get install apt-transport-https
          echo "deb https://artifacts.elastic.co/packages/7.x/apt stable main" | sudo tee -a /etc/apt/sources.list.d/elastic-7.x.list
          sudo apt-get update && sudo apt-get install logstash
      - name: Install plugin
        run: sudo /usr/share/logstash/bin/logstash-plugin install logstash-kusto.gem
      - run: sudo env "PATH=$PATH" bundle install
      - run: sudo chmod -R 777 /usr/share/logstash
      - run: sudo chmod -R 777 .
      - name: Run e2e
        run: ruby e2e.rb
        working-directory: 'e2e'
        env:
          ENGINE_URL: ${{ secrets.ENGINE_URL }}
          INGEST_URL: ${{ secrets.INGEST_URL }}
          APP_ID: ${{ secrets.APP_ID }}
          APP_KEY: ${{ secrets.APP_KEY }}
          TENANT_ID: ${{ secrets.TENANT_ID }}
          TEST_DATABASE: ${{ secrets.TEST_DATABASE }}
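
For anyone reproducing the 'default' matrix leg outside CI, a minimal local sketch follows, assuming JRuby and Bundler are already on PATH (the workflow gets them from ruby/setup-ruby@v1); the checkout directory name kusto is only there to mirror the workflow:

    # Local approximation of the 'default' leg: test, build, and rename the gem.
    git clone https://github.com/Azure/logstash-output-kusto kusto
    cd kusto
    bundle install
    bundle exec rake spec_junit   # writes rspec.xml, which the workflow publishes
    gem build *.gemspec
    mv *.gem logstash-kusto.gem   # the artifact name the e2e job downloads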
@@ -0,0 +1,43 @@
on:
  release:
    types: [created]

name: Deploy and Create Release

jobs:
  build:
    name: Upload Release Asset
    runs-on: ubuntu-latest
    steps:
      - uses: ruby/setup-ruby@v1
        with:
          ruby-version: jruby
          bundler-cache: true
      - name: Checkout code
        uses: actions/checkout@v2
      - run: bundle install
      - name: Get release
        id: get_release
        uses: bruceadams/get-release@v1.2.2
        env:
          GITHUB_TOKEN: ${{ github.token }}
      - name: Publish gem
        uses: dawidd6/action-publish-gem@v1
        with:
          api_key: ${{ secrets.RUBYGEMS_KEY }}
          github_token: ${{ secrets.GITHUB_TOKEN }}
      - name: Set artifact name
        id: set_artifact_name
        run: |
          ARTIFACT_PATH=$(find . -maxdepth 1 -iname '*.gem')
          echo "::set-output name=artifact_name::$ARTIFACT_PATH"
      - name: Upload Release Asset
        id: upload-release-asset
        uses: actions/upload-release-asset@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.get_release.outputs.upload_url }}
          asset_path: ./${{ steps.set_artifact_name.outputs.artifact_name }}
          asset_name: ${{ steps.set_artifact_name.outputs.artifact_name }}
          asset_content_type: application/zip
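
One maintenance note: the `::set-output` command used above has since been deprecated by GitHub in favor of the $GITHUB_OUTPUT environment file. On newer runners the "Set artifact name" step body would read:

    # Same step, written against the newer $GITHUB_OUTPUT mechanism.
    ARTIFACT_PATH=$(find . -maxdepth 1 -iname '*.gem')
    echo "artifact_name=$ARTIFACT_PATH" >> "$GITHUB_OUTPUT"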
@@ -30,4 +30,12 @@ Gemfile.lock
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json

#IDEA
.idea
.idea/*

#Jar files and generated files
**/*.jar
**/*_jars.rb
.travis.yml
@@ -1,24 +0,0 @@
sudo: false
language: ruby
cache: bundler
matrix:
  include:
  - rvm: jruby-9.2.13.0
    env: LOGSTASH_BRANCH=7.x
  - rvm: jruby-9.2.13.0
    env: LOGSTASH_BRANCH=7.0
  - rvm: jruby-9.2.13.0
    env: LOGSTASH_BRANCH=6.8
  - rvm: jruby-9.2.13.0
    env: LOGSTASH_BRANCH=6.7
  - rvm: jruby-9.1.13.0
    env: LOGSTASH_BRANCH=6.6
  fast_finish: true
install: true
script: ci/build.sh
jdk: openjdk8
deploy:
  provider: rubygems
  api_key: $rubygems_api_key
  on:
    tags: true
Gemfile
@@ -1,5 +1,3 @@
-gem 'ruby-maven', '~> 3.3.11'
-
source 'https://rubygems.org'
gemspec

@@ -8,7 +6,15 @@ gemspec
logstash_path = ENV["LOGSTASH_PATH"] || "../../logstash"
use_logstash_source = ENV["LOGSTASH_SOURCE"] && ENV["LOGSTASH_SOURCE"].to_s == "1"

puts "cwd: #{Dir.getwd}, use_logstash_source: #{use_logstash_source}, logstash_path: #{logstash_path}, exists: #{Dir.exist?(logstash_path)}"

if Dir.exist?(logstash_path) && use_logstash_source
  puts "Using local logstash"
  gem 'logstash-core', :path => "#{logstash_path}/logstash-core"
  gem 'logstash-core-plugin-api', :path => "#{logstash_path}/logstash-core-plugin-api"
else
  puts "using default logstash"
end

gem "rspec"
gem "rspec_junit_formatter"
@@ -1,7 +1,7 @@
# Logstash Output Plugin for Azure Data Explorer (Kusto)

-[![Build Status](https://img.shields.io/travis/azure/logstash-output-kusto/master.svg?label=master)](https://travis-ci.org/Azure/logstash-output-kusto)
-[![Build Status](https://img.shields.io/travis/azure/logstash-output-kusto/dev.svg?label=dev)](https://travis-ci.org/Azure/logstash-output-kusto)
+![build](https://github.com/Azure/logstash-output-kusto/workflows/build/badge.svg)
+![build](https://github.com/Azure/logstash-output-kusto/workflows/build/badge.svg?branch=master)
[![Gem](https://img.shields.io/gem/v/logstash-output-kusto.svg)](https://rubygems.org/gems/logstash-output-kusto)
[![Gem](https://img.shields.io/gem/dt/logstash-output-kusto.svg)](https://rubygems.org/gems/logstash-output-kusto)
Rakefile
@@ -4,4 +4,14 @@ task :default do
  system("rake -T")
end

require "logstash/devutils/rake"

begin
  require 'rspec/core/rake_task'
  RSpec::Core::RakeTask.new(:spec)
rescue LoadError
end

RSpec::Core::RakeTask.new(:spec_junit) do |t|
  t.rspec_opts = '--format RspecJunitFormatter --out rspec.xml'
end
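
The new spec_junit task is what the build job's Test step invokes; rspec_junit_formatter (pulled in through the Gemfile above) emits the JUnit XML consumed by the publish-unit-test-result action:

    bundle exec rake spec_junit   # runs the suite and leaves rspec.xml in the working directory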
ci/build.sh
@@ -1,23 +0,0 @@
#!/bin/bash
# version: 1
########################################################
#
# AUTOMATICALLY GENERATED! DO NOT EDIT
#
########################################################
set -e

echo "Starting build process in: `pwd`"
source ./ci/setup.sh

if [[ -f "ci/run.sh" ]]; then
  echo "Running custom build script in: `pwd`/ci/run.sh"
  source ./ci/run.sh
else
  echo "Running default build scripts in: `pwd`/ci/build.sh"
  bundle install
  bundle exec rake vendor
  bundle exec rspec spec
  lock_jars
  gem build logstash-output-kusto.gemspec
fi
ci/setup.sh
@@ -1,26 +0,0 @@
#!/bin/bash
# version: 1
########################################################
#
# AUTOMATICALLY GENERATED! DO NOT EDIT
#
########################################################
set -e
if [ "$LOGSTASH_BRANCH" ]; then
  echo "Building plugin using Logstash source"
  BASE_DIR=`pwd`
  echo "Checking out branch: $LOGSTASH_BRANCH"
  git clone -b $LOGSTASH_BRANCH https://github.com/elastic/logstash.git ../../logstash --depth 1
  printf "Checked out Logstash revision: %s\n" "$(git -C ../../logstash rev-parse HEAD)"
  cd ../../logstash
  echo "Building plugins with Logstash version:"
  cat versions.yml
  echo "---"
  # We need to build the jars for that specific version
  echo "Running gradle assemble in: `pwd`"
  ./gradlew assemble
  cd $BASE_DIR
  export LOGSTASH_SOURCE=1
else
  echo "Building plugin using released gems on rubygems"
fi
@@ -0,0 +1,10 @@
0,00000000-0000-0000-0001-020304050607,0.0,0.0,0,0,0,0,0,0,0,0,2014-01-01T01:01:01.0000000Z,Zero,"Zero",0,00:00:00,,null
1,00000001-0000-0000-0001-020304050607,1.0001,1.01,1,1,1,1,1,1,1,1,2015-01-01T01:01:01.0000000Z,One,"One",1,00:00:01.0010001,,"{""arr"":[0,1],""rowId"":1}"
2,00000002-0000-0000-0001-020304050607,2.0002,2.02,0,2,2,2,2,2,2,2,2016-01-01T01:01:01.0000000Z,Two,"Two",2,-00:00:02.0020002,,"{""arr"":[0,2],""rowId"":2}"
3,00000003-0000-0000-0001-020304050607,3.0003,3.03,1,3,3,3,3,3,3,3,2017-01-01T01:01:01.0000000Z,Three,"Three",3,00:00:03.0030003,,"{""arr"":[0,3],""rowId"":3}"
4,00000004-0000-0000-0001-020304050607,4.0004,4.04,0,4,4,4,4,4,4,4,2018-01-01T01:01:01.0000000Z,Four,"Four",4,-00:00:04.0040004,,"{""arr"":[0,4],""rowId"":4}"
5,00000005-0000-0000-0001-020304050607,5.0005,5.05,1,5,5,5,5,5,5,5,2019-01-01T01:01:01.0000000Z,Five,"Five",5,00:00:05.0050005,,"{""arr"":[0,5],""rowId"":5}"
6,00000006-0000-0000-0001-020304050607,6.0006,6.06,0,6,6,6,6,6,6,6,2020-01-01T01:01:01.0000000Z,Six,"Six",6,-00:00:06.0060006,,"{""arr"":[0,6],""rowId"":6}"
7,00000007-0000-0000-0001-020304050607,7.0007,7.07,1,7,7,7,7,7,7,7,2021-01-01T01:01:01.0000000Z,Seven,"Seven",7,00:00:07.0070007,,"{""arr"":[0,7],""rowId"":7}"
8,00000008-0000-0000-0001-020304050607,8.0008,8.08,0,8,8,8,8,8,8,8,2022-01-01T01:01:01.0000000Z,Eight,"Eight",8,-00:00:08.0080008,,"{""arr"":[0,8],""rowId"":8}"
9,00000009-0000-0000-0001-020304050607,9.0009,9.09,1,9,9,9,9,9,9,9,2023-01-01T01:01:01.0000000Z,Nine,"Nine",9,00:00:09.0090009,,"{""arr"":[0,9],""rowId"":9}"
@@ -0,0 +1 @@
[{"Properties":{"Path":"$.rownumber"},"column":"rownumber","datatype":"int"},{"Properties":{"Path":"$.rowguid"},"column":"rowguid","datatype":"string"},{"Properties":{"Path":"$.xdouble"},"column":"xdouble","datatype":"real"},{"Properties":{"Path":"$.xfloat"},"column":"xfloat","datatype":"real"},{"Properties":{"Path":"$.xbool"},"column":"xbool","datatype":"bool"},{"Properties":{"Path":"$.xint16"},"column":"xint16","datatype":"int"},{"Properties":{"Path":"$.xint32"},"column":"xint32","datatype":"int"},{"Properties":{"Path":"$.xint64"},"column":"xint64","datatype":"long"},{"Properties":{"Path":"$.xuint8"},"column":"xuint8","datatype":"long"},{"Properties":{"Path":"$.xuint16"},"column":"xuint16","datatype":"long"},{"Properties":{"Path":"$.xuint32"},"column":"xuint32","datatype":"long"},{"Properties":{"Path":"$.xuint64"},"column":"xuint64","datatype":"long"},{"Properties":{"Path":"$.xdate"},"column":"xdate","datatype":"datetime"},{"Properties":{"Path":"$.xsmalltext"},"column":"xsmalltext","datatype":"string"},{"Properties":{"Path":"$.xtext"},"column":"xtext","datatype":"string"},{"Properties":{"Path":"$.rowguid"},"column":"xnumberAsText","datatype":"string"},{"Properties":{"Path":"$.xtime"},"column":"xtime","datatype":"timespan"},{"Properties":{"Path":"$.xtextWithNulls"},"column":"xtextWithNulls","datatype":"string"},{"Properties":{"Path":"$.xdynamicWithNulls"},"column":"xdynamicWithNulls","datatype":"dynamic"}]
@@ -0,0 +1,126 @@
require '../lib/logstash-output-kusto_jars'
require 'csv'

$kusto_java = Java::com.microsoft.azure.kusto

class E2E

  def initialize
    super
    @input_file = "/tmp/input_file.txt"
    @output_file = "output_file.txt"
    @columns = "(rownumber:int, rowguid:string, xdouble:real, xfloat:real, xbool:bool, xint16:int, xint32:int, xint64:long, xuint8:long, xuint16:long, xuint32:long, xuint64:long, xdate:datetime, xsmalltext:string, xtext:string, xnumberAsText:string, xtime:timespan, xtextWithNulls:string, xdynamicWithNulls:dynamic)"
    @csv_columns = '"rownumber", "rowguid", "xdouble", "xfloat", "xbool", "xint16", "xint32", "xint64", "xuint8", "xuint16", "xuint32", "xuint64", "xdate", "xsmalltext", "xtext", "xnumberAsText", "xtime", "xtextWithNulls", "xdynamicWithNulls"'
    @column_count = 19
    @engine_url = ENV["ENGINE_URL"]
    @ingest_url = ENV["INGEST_URL"]
    @app_id = ENV["APP_ID"]
    @app_key = ENV['APP_KEY']
    @tenant_id = ENV['TENANT_ID']
    @database = ENV['TEST_DATABASE']
    @table = "RubyE2E#{Time.now.getutc.to_i}"
    @mapping_name = "test_mapping"
    @csv_file = "dataset.csv"

    @logstash_config = %{
input {
  file { path => "#{@input_file}"}
}
filter {
  csv { columns => [#{@csv_columns}]}
}
output {
  file { path => "#{@output_file}"}
  stdout { codec => rubydebug }
  kusto {
    path => "tmp%{+YYYY-MM-dd-HH-mm}.txt"
    ingest_url => "#{@ingest_url}"
    app_id => "#{@app_id}"
    app_key => "#{@app_key}"
    app_tenant => "#{@tenant_id}"
    database => "#{@database}"
    table => "#{@table}"
    json_mapping => "#{@mapping_name}"
  }
}
}
  end

  def create_table_and_mapping
    puts "Creating table #{@table}"
    @query_client.execute(@database, ".drop table #{@table} ifexists")
    sleep(1)
    @query_client.execute(@database, ".create table #{@table} #{@columns}")
    @query_client.execute(@database, ".alter table #{@table} policy ingestionbatching @'{\"MaximumBatchingTimeSpan\":\"00:00:10\", \"MaximumNumberOfItems\": 1, \"MaximumRawDataSizeMB\": 100}'")
    @query_client.execute(@database, ".create table #{@table} ingestion json mapping '#{@mapping_name}' '#{File.read("dataset_mapping.json")}'")
  end

  def run_logstash
    File.write("logstash.conf", @logstash_config)

    File.write(@output_file, "")
    File.write(@input_file, "")
    spawn("/usr/share/logstash/bin/logstash -f logstash.conf")
    sleep(60)
    data = File.read(@csv_file)
    f = File.open(@input_file, "a")
    f.write(data)
    f.close
    sleep(60)
    puts File.read(@output_file)
  end

  def assert_data
    max_timeout = 10
    csv_data = CSV.read(@csv_file)

    (0...max_timeout).each do |_|
      begin
        sleep(5)
        query = @query_client.execute(@database, "#{@table} | sort by rownumber asc")
        result = query.getPrimaryResults()
        raise "Wrong count - expected #{csv_data.length}, got #{result.count()}" unless result.count() == csv_data.length
      rescue Exception => e
        puts "Error: #{e}"
        next # retry until ingestion completes or the timeout budget runs out
      end
      (0...csv_data.length).each do |i|
        result.next()
        puts "Item #{i}"
        (0...@column_count).each do |j|
          csv_item = csv_data[i][j]
          result_item = result.getObject(j) == nil ? "null" : result.getString(j)

          # special cases for data that is different in csv vs kusto
          if j == 4 # kusto boolean field
            csv_item = csv_item.to_s == "1" ? "true" : "false"
          elsif j == 12 # date formatting
            csv_item = csv_item.sub(".0000000", "")
          elsif j == 15 # numbers as text
            result_item = i.to_s
          elsif j == 17 # null
            next
          end
          puts "  csv[#{j}] = #{csv_item}"
          puts "  result[#{j}] = #{result_item}"
          raise "Result doesn't match csv" unless csv_item == result_item
        end
        puts ""
      end
      return
    end
    raise "Failed after timeouts"
  end

  def start
    @query_client = $kusto_java.data.ClientImpl.new($kusto_java.data.ConnectionStringBuilder::createWithAadApplicationCredentials(@engine_url, @app_id, @app_key, @tenant_id))
    create_table_and_mapping
    run_logstash
    assert_data
  end

end

E2E::new().start
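
The script reads its connection details from the same six environment variables the workflow maps from secrets. A local invocation sketch with hypothetical placeholder values, assuming Logstash is installed under /usr/share/logstash and the plugin gem is already installed into it:

    cd e2e
    ENGINE_URL="https://<cluster>.<region>.kusto.windows.net" \
    INGEST_URL="https://ingest-<cluster>.<region>.kusto.windows.net" \
    APP_ID="<aad-app-id>" APP_KEY="<aad-app-key>" \
    TENANT_ID="<aad-tenant-id>" TEST_DATABASE="<database>" \
    ruby e2e.rb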
@@ -114,12 +114,15 @@ class LogStash::Outputs::Kusto < LogStash::Outputs::Base
    @files = {}
    @io_mutex = Mutex.new

-   json_mapping ||= mapping
+   final_mapping = json_mapping
+   if final_mapping.empty?
+     final_mapping = mapping
+   end

    # TODO: add id to the tmp path to support multiple outputs of the same type
    # add fields from the meta that will note the destination of the events in the file
    @path = if dynamic_event_routing
-     File.expand_path("#{path}.%{[@metadata][database]}.%{[@metadata][table]}.%{[@metadata][json_mapping]}")
+     File.expand_path("#{path}.%{[@metadata][database]}.%{[@metadata][table]}.%{[@metadata][final_mapping]}")
    else
      File.expand_path("#{path}.#{database}.#{table}")
    end

@@ -138,7 +141,7 @@ class LogStash::Outputs::Kusto < LogStash::Outputs::Base
      max_queue: upload_queue_size,
      fallback_policy: :caller_runs)

-   @ingestor = Ingestor.new(ingest_url, app_id, app_key, app_tenant, database, table, json_mapping, delete_temp_files, @logger, executor)
+   @ingestor = Ingestor.new(ingest_url, app_id, app_key, app_tenant, database, table, final_mapping, delete_temp_files, @logger, executor)

    # send existing files
    recover_past_files if recovery
@@ -41,7 +41,6 @@ class LogStash::Outputs::Kusto < LogStash::Outputs::Base
    @ingestion_properties = kusto_java.ingest.IngestionProperties.new(database, table)
    @ingestion_properties.setIngestionMapping(json_mapping, kusto_java.ingest.IngestionMapping::IngestionMappingKind::Json)
    @ingestion_properties.setDataFormat(kusto_java.ingest.IngestionProperties::DATA_FORMAT::json)

    @delete_local = delete_local

    @logger.debug('Kusto resources are ready.')
@@ -1,6 +1,6 @@
Gem::Specification.new do |s|
  s.name = 'logstash-output-kusto' # WATCH OUT: we hardcoded usage of this name in one of the classes.
- s.version = '1.0.0'
+ s.version = '1.0.2'
  s.licenses = ['Apache-2.0']
  s.summary = 'Writes events to Azure Data Explorer (Kusto)'
  s.description = 'This is a logstash output plugin used to write events to Azure Data Explorer (a.k.a. Kusto)'
@@ -2,4 +2,20 @@
require "logstash/devutils/rspec/spec_helper"
require "logstash/logging/logger"

LogStash::Logging::Logger::configure_logging("debug")

RSpec.configure do |config|
  # register around filter that captures stdout and stderr
  config.around(:each) do |example|
    $stdout = StringIO.new
    $stderr = StringIO.new

    example.run

    example.metadata[:stdout] = $stdout.string
    example.metadata[:stderr] = $stderr.string

    $stdout = STDOUT
    $stderr = STDERR
  end
end