From d3e80c18c2dee129141e09343fa0542456b7ab4c Mon Sep 17 00:00:00 2001
From: hoenn
Date: Tue, 2 Apr 2024 12:05:13 -0400
Subject: [PATCH 1/2] Add ES8 tests, add test data for breaking change

---
 integration_data/{ => v5}/documents.dat       |  0
 integration_data/v6/documents.dat             | 20 +++++
 integration_data/v7/documents.dat             | 20 +++++
 integration_data/v8/documents.dat             | 20 +++++
 .../v8/elasticsearch-cluster-compose.yml      | 82 +++++++++++++++++++
 integration_data/v8/elasticsearch.dockerfile  |  7 ++
 integration_test.go                           |  4 +-
 script/integration-test                       |  5 +-
 8 files changed, 155 insertions(+), 3 deletions(-)
 rename integration_data/{ => v5}/documents.dat (100%)
 create mode 100644 integration_data/v6/documents.dat
 create mode 100644 integration_data/v7/documents.dat
 create mode 100644 integration_data/v8/documents.dat
 create mode 100644 integration_data/v8/elasticsearch-cluster-compose.yml
 create mode 100644 integration_data/v8/elasticsearch.dockerfile

diff --git a/integration_data/documents.dat b/integration_data/v5/documents.dat
similarity index 100%
rename from integration_data/documents.dat
rename to integration_data/v5/documents.dat
diff --git a/integration_data/v6/documents.dat b/integration_data/v6/documents.dat
new file mode 100644
index 0000000..40750dc
--- /dev/null
+++ b/integration_data/v6/documents.dat
@@ -0,0 +1,20 @@
+{ "index" : { "_index" : "integration_test", "_type" : "testdoc" } }
+{ "stat" : "test_data", "value": "foo" }
+{ "index" : { "_index" : "integration_test", "_type" : "testdoc" } }
+{ "stat" : "test_data", "value": "foo" }
+{ "index" : { "_index" : "integration_test", "_type" : "testdoc" } }
+{ "stat" : "test_data", "value": "foo" }
+{ "index" : { "_index" : "integration_test", "_type" : "testdoc" } }
+{ "stat" : "test_data", "value": "foo" }
+{ "index" : { "_index" : "integration_test", "_type" : "testdoc" } }
+{ "stat" : "test_data", "value": "foo" }
+{ "index" : { "_index" : "integration_test", "_type" : "testdoc" } }
+{ "stat" : "test_data", "value": "foo" }
+{ "index" : { "_index" : "integration_test", "_type" : "testdoc" } }
+{ "stat" : "test_data", "value": "foo" }
+{ "index" : { "_index" : "integration_test", "_type" : "testdoc" } }
+{ "stat" : "test_data", "value": "foo" }
+{ "index" : { "_index" : "integration_test", "_type" : "testdoc" } }
+{ "stat" : "test_data", "value": "foo" }
+{ "index" : { "_index" : "integration_test", "_type" : "testdoc" } }
+{ "stat" : "test_data", "value": "foo" }
diff --git a/integration_data/v7/documents.dat b/integration_data/v7/documents.dat
new file mode 100644
index 0000000..40750dc
--- /dev/null
+++ b/integration_data/v7/documents.dat
@@ -0,0 +1,20 @@
+{ "index" : { "_index" : "integration_test", "_type" : "testdoc" } }
+{ "stat" : "test_data", "value": "foo" }
+{ "index" : { "_index" : "integration_test", "_type" : "testdoc" } }
+{ "stat" : "test_data", "value": "foo" }
+{ "index" : { "_index" : "integration_test", "_type" : "testdoc" } }
+{ "stat" : "test_data", "value": "foo" }
+{ "index" : { "_index" : "integration_test", "_type" : "testdoc" } }
+{ "stat" : "test_data", "value": "foo" }
+{ "index" : { "_index" : "integration_test", "_type" : "testdoc" } }
+{ "stat" : "test_data", "value": "foo" }
+{ "index" : { "_index" : "integration_test", "_type" : "testdoc" } }
+{ "stat" : "test_data", "value": "foo" }
+{ "index" : { "_index" : "integration_test", "_type" : "testdoc" } }
+{ "stat" : "test_data", "value": "foo" }
+{ "index" : { "_index" : "integration_test", "_type" : "testdoc" } }
+{ "stat" : "test_data", "value": "foo" }
+{ "index" : { "_index" : "integration_test", "_type" : "testdoc" } }
"index" : { "_index" : "integration_test", "_type" : "testdoc" } } +{ "stat" : "test_data", "value": "foo" } +{ "index" : { "_index" : "integration_test", "_type" : "testdoc" } } +{ "stat" : "test_data", "value": "foo" } diff --git a/integration_data/v8/documents.dat b/integration_data/v8/documents.dat new file mode 100644 index 0000000..f331706 --- /dev/null +++ b/integration_data/v8/documents.dat @@ -0,0 +1,20 @@ +{ "index" : { "_index" : "integration_test" } } +{ "stat" : "test_data", "value": "foo" } +{ "index" : { "_index" : "integration_test"} } +{ "stat" : "test_data", "value": "foo" } +{ "index" : { "_index" : "integration_test" } } +{ "stat" : "test_data", "value": "foo" } +{ "index" : { "_index" : "integration_test" } } +{ "stat" : "test_data", "value": "foo" } +{ "index" : { "_index" : "integration_test" } } +{ "stat" : "test_data", "value": "foo" } +{ "index" : { "_index" : "integration_test" } } +{ "stat" : "test_data", "value": "foo" } +{ "index" : { "_index" : "integration_test" } } +{ "stat" : "test_data", "value": "foo" } +{ "index" : { "_index" : "integration_test" } } +{ "stat" : "test_data", "value": "foo" } +{ "index" : { "_index" : "integration_test" } } +{ "stat" : "test_data", "value": "foo" } +{ "index" : { "_index" : "integration_test" } } +{ "stat" : "test_data", "value": "foo" } diff --git a/integration_data/v8/elasticsearch-cluster-compose.yml b/integration_data/v8/elasticsearch-cluster-compose.yml new file mode 100644 index 0000000..504fccd --- /dev/null +++ b/integration_data/v8/elasticsearch-cluster-compose.yml @@ -0,0 +1,82 @@ +version: '3' + +services: + elasticsearch-v8: + build: + context: . + dockerfile: elasticsearch.dockerfile + image: vulcanizer-elasticsearch:v8 + container_name: vulcanizer-elasticsearch-v8 + environment: + - cluster.name=vulcanizer-elasticsearch-v8 + - node.name=vulcanizer-elasticsearch-v8 + - bootstrap.memory_lock=true + - "path.repo=/backups" + - "discovery.seed_hosts=vulcanizer-elasticsearch-v8,vulcanizer-elasticsearch-v8-2" + - "cluster.initial_master_nodes=vulcanizer-elasticsearch-v8,vulcanizer-elasticsearch-v8-2" + - "ES_JAVA_OPTS=-Xms512M -Xmx512M" + - xpack.security.enabled=false + - xpack.profiling.enabled=false + - xpack.ml.enabled=false + - xpack.graph.enabled=false + - xpack.watcher.enabled=false + - ingest.geoip.downloader.enabled=false + - indices.lifecycle.history_index_enabled=false + ulimits: + memlock: + soft: -1 + hard: -1 + nofile: + soft: 65536 + hard: 65536 + cap_add: + - IPC_LOCK + ports: + - 49200:9200 + networks: + - vulcanizer-esnet + volumes: + - vulcanizer-backup-volume:/backups + + elasticsearch-v8-2: + build: + context: . 
+      dockerfile: elasticsearch.dockerfile
+    image: vulcanizer-elasticsearch:v8
+    depends_on:
+      - "elasticsearch-v8"
+    container_name: vulcanizer-elasticsearch-v8-2
+    environment:
+      - cluster.name=vulcanizer-elasticsearch-v8
+      - bootstrap.memory_lock=true
+      - node.name=vulcanizer-elasticsearch-v8-2
+      - "path.repo=/backups"
+      - "discovery.seed_hosts=vulcanizer-elasticsearch-v8,vulcanizer-elasticsearch-v8-2"
+      - "cluster.initial_master_nodes=vulcanizer-elasticsearch-v8,vulcanizer-elasticsearch-v8-2"
+      - "ES_JAVA_OPTS=-Xms512M -Xmx512M"
+      - xpack.security.enabled=false
+      - xpack.profiling.enabled=false
+      - xpack.ml.enabled=false
+      - xpack.graph.enabled=false
+      - xpack.watcher.enabled=false
+      - ingest.geoip.downloader.enabled=false
+      - indices.lifecycle.history_index_enabled=false
+    ulimits:
+      memlock:
+        soft: -1
+        hard: -1
+      nofile:
+        soft: 65536
+        hard: 65536
+    cap_add:
+      - IPC_LOCK
+    networks:
+      - vulcanizer-esnet
+    volumes:
+      - vulcanizer-backup-volume:/backups
+
+networks:
+  vulcanizer-esnet:
+
+volumes:
+  vulcanizer-backup-volume:
diff --git a/integration_data/v8/elasticsearch.dockerfile b/integration_data/v8/elasticsearch.dockerfile
new file mode 100644
index 0000000..c3143d7
--- /dev/null
+++ b/integration_data/v8/elasticsearch.dockerfile
@@ -0,0 +1,7 @@
+FROM docker.elastic.co/elasticsearch/elasticsearch:8.13.0
+
+USER root
+
+RUN mkdir /backups && chown elasticsearch:elasticsearch /backups
+
+USER elasticsearch
diff --git a/integration_test.go b/integration_test.go
index 2a13e85..9c70eba 100644
--- a/integration_test.go
+++ b/integration_test.go
@@ -1,11 +1,13 @@
+//go:build integration
 // +build integration
 
 package vulcanizer_test
 
 import (
-	"github.com/github/vulcanizer"
 	"testing"
 	"time"
+
+	"github.com/github/vulcanizer"
 )
 
 func TestNodes(t *testing.T) {
diff --git a/script/integration-test b/script/integration-test
index da2ec29..868b5df 100755
--- a/script/integration-test
+++ b/script/integration-test
@@ -17,7 +17,8 @@ fi
 # Run regular unit tests first
 ./script/test
 
-elasticsearch_versions=(v5 v6 v7)
+# These match integration_data/ sub-folder names
+elasticsearch_versions=(v5 v6 v7 v8)
 
 for v in "${elasticsearch_versions[@]}"
 do
@@ -44,7 +45,7 @@ do
      }
    }'
  echo "Filling in data to integration_test index"
-  curl -s -H "Content-Type: application/x-ndjson" -XPOST localhost:49200/_bulk --data-binary "@../documents.dat"; echo
+  curl -s -H "Content-Type: application/x-ndjson" -XPOST localhost:49200/_bulk --data-binary "@documents.dat"; echo
  echo "Creating snapshot repository backup-repo"
  curl -H "Content-Type: application/json" -XPUT localhost:49200/_snapshot/backup-repo -d '{ "type": "fs", "settings": { "location": "/backups" } }'
  echo "Making snapshot snapshot_1 in repository backup-repo"

From 8fc22a29bf4f8a5765f4c650060ead1b76e50906 Mon Sep 17 00:00:00 2001
From: hoenn
Date: Tue, 2 Apr 2024 12:11:04 -0400
Subject: [PATCH 2/2] Remove deprecated docker-compose usage

---
 script/integration-test | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/script/integration-test b/script/integration-test
index 868b5df..0c0229e 100755
--- a/script/integration-test
+++ b/script/integration-test
@@ -23,7 +23,7 @@ elasticsearch_versions=(v5 v6 v7 v8)
 
 for v in "${elasticsearch_versions[@]}"
 do
  # Make sure everything is clear from previous runs
-  docker-compose -f integration_data/$v/elasticsearch-cluster-compose.yml down
+  docker compose -f integration_data/$v/elasticsearch-cluster-compose.yml down
  docker volume rm ${v}_vulcanizer-backup-volume || echo "OK for volume not to exist"
 done
@@ -31,7 +31,7 @@ for v in "${elasticsearch_versions[@]}"
 do
  cd integration_data/$v
  echo "Running integration tests for Elasticsearch $v"
-  docker-compose -f elasticsearch-cluster-compose.yml up --build -d
+  docker compose -f elasticsearch-cluster-compose.yml up --build -d
  echo "Wait for Elasticsearch $v to start..."
  until foo=$(curl -s localhost:49200)
  do
@@ -56,6 +56,6 @@ do
  # Run tests
  go test -v github.com/github/vulcanizer/... -tags integration -count=1
 
-  docker-compose -f elasticsearch-cluster-compose.yml down
+  docker compose -f elasticsearch-cluster-compose.yml down
  cd ../../
 done
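
Note (illustrative, not part of either patch): the readiness wait that script/integration-test performs with `until foo=$(curl -s localhost:49200)` can be reproduced in Go when poking at the v8 cluster outside the script. The sketch below only assumes the 49200:9200 host port mapping from the compose file above; the file name, the /_cluster/health endpoint choice, the timeout, and the poll interval are this example's own, not the repository's.

// wait_for_es.go (hypothetical helper, illustrative only)
package main

import (
	"fmt"
	"net/http"
	"os"
	"time"
)

func main() {
	// Port 49200 is the host port the compose file maps to the container's 9200.
	const url = "http://localhost:49200/_cluster/health"
	deadline := time.Now().Add(2 * time.Minute) // assumed upper bound; the shell loop retries indefinitely

	for {
		resp, err := http.Get(url)
		if err == nil {
			resp.Body.Close()
			if resp.StatusCode == http.StatusOK {
				fmt.Println("Elasticsearch test cluster is up")
				return
			}
		}
		if time.Now().After(deadline) {
			fmt.Fprintln(os.Stderr, "timed out waiting for Elasticsearch on localhost:49200")
			os.Exit(1)
		}
		time.Sleep(2 * time.Second) // assumed poll interval
	}
}

Something like `go run wait_for_es.go` would then sit between `docker compose ... up --build -d` and `go test ... -tags integration` if the wait were ever moved out of the shell script.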