Mirror of https://github.com/github/vulcanizer.git
Refactor integration test to add v7
This commit is contained in:
Parent
ac86d41fe5
Commit
1a96650cb3
@@ -1530,8 +1530,6 @@ func TestGetShardOverlap_Safe(t *testing.T) {
     } else {
         t.Errorf("Expected overlap data, got nil instead")
     }
-
-    fmt.Println(overlap)
 }
 
 func TestGetShardOverlap_UnSafe(t *testing.T) {
@@ -1567,8 +1565,6 @@ func TestGetShardOverlap_UnSafe(t *testing.T) {
     } else {
         t.Errorf("Expected overlap data, got nil instead")
     }
-
-    fmt.Println(overlap)
 }
 
 func TestGetShardOverlap_UnSafeRelocating(t *testing.T) {
@@ -0,0 +1,78 @@
+version: '3'
+
+services:
+  elasticsearch-v7:
+    build:
+      context: .
+      dockerfile: elasticsearch.dockerfile
+    image: vulcanizer-elasticsearch:v7
+    container_name: vulcanizer-elasticsearch-v7
+    environment:
+      - cluster.name=vulcanizer-elasticsearch-v7
+      - node.name=vulcanizer-elasticsearch-v7
+      - bootstrap.memory_lock=true
+      - "path.repo=/backups"
+      - "discovery.seed_hosts=vulcanizer-elasticsearch-v7,vulcanizer-elasticsearch-v7-2"
+      - "cluster.initial_master_nodes=vulcanizer-elasticsearch-v7,vulcanizer-elasticsearch-v7-2"
+      - "ES_JAVA_OPTS=-Xms512M -Xmx512M"
+      - xpack.security.enabled=false
+      - xpack.monitoring.enabled=false
+      - xpack.ml.enabled=false
+      - xpack.graph.enabled=false
+      - xpack.watcher.enabled=false
+    ulimits:
+      memlock:
+        soft: -1
+        hard: -1
+      nofile:
+        soft: 65536
+        hard: 65536
+    cap_add:
+      - IPC_LOCK
+    ports:
+      - 49200:9200
+    networks:
+      - vulcanizer-esnet
+    volumes:
+      - vulcanizer-backup-volume:/backups
+
+  elasticsearch-v7-2:
+    build:
+      context: .
+      dockerfile: elasticsearch-v7.dockerfile
+    image: vulcanizer-elasticsearch:v7
+    depends_on:
+      - "elasticsearch-v7"
+    container_name: vulcanizer-elasticsearch-v7-2
+    environment:
+      - cluster.name=vulcanizer-elasticsearch-v7
+      - bootstrap.memory_lock=true
+      - node.name=vulcanizer-elasticsearch-v7-2
+      - "path.repo=/backups"
+      - "discovery.seed_hosts=vulcanizer-elasticsearch-v7,vulcanizer-elasticsearch-v7-2"
+      - "cluster.initial_master_nodes=vulcanizer-elasticsearch-v7,vulcanizer-elasticsearch-v7-2"
+      - "ES_JAVA_OPTS=-Xms512M -Xmx512M"
+      - xpack.security.enabled=false
+      - xpack.monitoring.enabled=false
+      - xpack.ml.enabled=false
+      - xpack.graph.enabled=false
+      - xpack.watcher.enabled=false
+    ulimits:
+      memlock:
+        soft: -1
+        hard: -1
+      nofile:
+        soft: 65536
+        hard: 65536
+    cap_add:
+      - IPC_LOCK
+    networks:
+      - vulcanizer-esnet
+    volumes:
+      - vulcanizer-backup-volume:/backups
+
+networks:
+  vulcanizer-esnet:
+
+volumes:
+  vulcanizer-backup-volume:
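The file above publishes only the first node's HTTP port (49200 -> 9200); the second node joins the cluster over the internal vulcanizer-esnet network through discovery.seed_hosts and cluster.initial_master_nodes. A minimal sketch for bringing the two-node v7 cluster up by hand and confirming both nodes joined, assuming the compose file is checked in as integration_data/v7/elasticsearch-cluster-compose.yml (the path is implied by the test script further down, not shown in this hunk):

    # Build the images and start both v7 containers in the background
    docker-compose -f integration_data/v7/elasticsearch-cluster-compose.yml up -d

    # Poll the published port until the HTTP API answers, same pattern as the test script
    until curl -s localhost:49200 > /dev/null
    do
        sleep 10
    done

    # Both nodes should be listed and cluster health reported
    curl -s localhost:49200/_cat/nodes?v
    curl -s localhost:49200/_cluster/health?pretty

    # Tear the cluster down again
    docker-compose -f integration_data/v7/elasticsearch-cluster-compose.yml down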
@@ -0,0 +1,7 @@
+FROM docker.elastic.co/elasticsearch/elasticsearch:7.1.0
+
+USER root
+
+RUN mkdir /backups && chown elasticsearch:elasticsearch /backups
+
+USER elasticsearch
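The only change over the stock 7.1.0 image is a /backups directory owned by the elasticsearch user, matching path.repo=/backups and the shared vulcanizer-backup-volume that the fs snapshot repository writes to. Normally docker-compose builds this image through the build: section above; a rough way to build and inspect it on its own (dockerfile name and image tag taken from that compose file, and assuming the build is run from the same directory):

    # Build the custom image directly, outside of docker-compose
    docker build -f elasticsearch.dockerfile -t vulcanizer-elasticsearch:v7 .

    # The snapshot directory should exist and be owned by elasticsearch
    docker run --rm --entrypoint ls vulcanizer-elasticsearch:v7 -ld /backups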
@@ -343,8 +343,8 @@ func TestGetShards_AllNodes(t *testing.T) {
     }
 
     // Account for the unassigned replicas
-    if len(val) != 15 {
-        t.Fatalf("Expected 15 shards, got %d instead", len(val))
+    if len(val) != 6 {
+        t.Fatalf("Expected 6 shards, got %d instead", len(val))
     }
 }
 
@@ -361,8 +361,8 @@ func TestGetShards_Regexp(t *testing.T) {
         t.Fatal("Expected a slice of Shard, got nil instead")
     }
 
-    if len(val) != 10 {
-        t.Fatalf("Expected 15 shards, got %d instead", len(val))
+    if len(val) != 4 {
+        t.Fatalf("Expected 4 shards, got %d instead", len(val))
     }
 }
 
@@ -4,47 +4,45 @@ set -e
 # Run regular unit tests first
 ./script/test
 
-# Make sure everything is clear from previous runs
-docker-compose -f integration_data/v5/elasticsearch-cluster-compose.yml down
-docker-compose -f integration_data/v6/elasticsearch-cluster-compose.yml down
-docker volume rm v5_vulcanizer-backup-volume || echo "OK for volume not to exist"
-docker volume rm v6_vulcanizer-backup-volume || echo "OK for volume not to exist"
+elasticsearch_versions=(v5 v6 v7)
 
-echo "Running integration tests for Elasticsearch v5"
-cd integration_data/v5
-docker-compose -f elasticsearch-cluster-compose.yml up -d
-echo "Wait for Elasticsearch v5 to start..."
-until foo=$(curl -s localhost:49200)
+for v in "${elasticsearch_versions[@]}"
 do
-    sleep 10
+    # Make sure everything is clear from previous runs
+    docker-compose -f integration_data/$v/elasticsearch-cluster-compose.yml down
+    docker volume rm ${v}_vulcanizer-backup-volume || echo "OK for volume not to exist"
 done
-curl -s -H "Content-Type: application/x-ndjson" -XPOST localhost:49200/_bulk --data-binary "@../documents.dat"; echo
-curl -s -XPUT localhost:49200/_snapshot/backup-repo -d '{ "type": "fs", "settings": { "location": "/backups" } }'
-curl -s -XPUT localhost:49200/_snapshot/backup-repo/snapshot_1?wait_for_completion=true
-curl -s -H "Content-Type: application/json" XPOST localhost:49200/_aliases -d '{ "actions" : [ { "add" : { "index" : "integration_test", "alias" : "integration_test_alias" } } ] }'
-
-# Run tests
-go test -v github.com/github/vulcanizer/... -tags integration -count=1
-
-docker-compose -f elasticsearch-cluster-compose.yml down
-
-
-cd ../v6
-
-echo "Running integration tests for Elasticsearch v6"
-docker-compose -f elasticsearch-cluster-compose.yml up -d
-# Wait for Elasticsearch to start...
-echo "Wait for Elasticsearch v6 to start..."
-until foo=$(curl -s localhost:49200)
+for v in "${elasticsearch_versions[@]}"
 do
-    sleep 10
+
+    cd integration_data/$v
+    echo "Running integration tests for Elasticsearch $v"
+    docker-compose -f elasticsearch-cluster-compose.yml up -d
+    echo "Wait for Elasticsearch $v to start..."
+    until foo=$(curl -s localhost:49200)
+    do
+        sleep 10
+    done
+
+    echo "Creating integration_test index"
+    curl -s -H "Content-Type: application/json" -XPUT localhost:49200/integration_test -d'{
+        "settings": {
+            "number_of_shards": 2
+        }
+    }'
+    echo "Filling in data to integration_test index"
+    curl -s -H "Content-Type: application/x-ndjson" -XPOST localhost:49200/_bulk --data-binary "@../documents.dat"; echo
+    echo "Creating snapshot repository backup-repo"
+    curl -H "Content-Type: application/json" -XPUT localhost:49200/_snapshot/backup-repo -d '{ "type": "fs", "settings": { "location": "/backups" } }'
+    echo "Making snapshot snapshot_1 in repository backup-repo"
+    curl -s -XPUT localhost:49200/_snapshot/backup-repo/snapshot_1?wait_for_completion=true
+    echo "Adding alias integration_test_alias"
+    curl -s -H "Content-Type: application/json" XPOST localhost:49200/_aliases -d '{ "actions" : [ { "add" : { "index" : "integration_test", "alias" : "integration_test_alias" } } ] }'
+
+    # Run tests
+    go test -v github.com/github/vulcanizer/... -tags integration -count=1
+
+    docker-compose -f elasticsearch-cluster-compose.yml down
+    cd ../../
 done
-curl -s -H "Content-Type: application/x-ndjson" -XPOST localhost:49200/_bulk --data-binary "@../documents.dat"; echo
-curl -s -XPUT -H 'Content-Type: application/json' localhost:49200/_snapshot/backup-repo -d '{ "type": "fs", "settings": { "location": "/backups" } }'
-curl -s -XPUT localhost:49200/_snapshot/backup-repo/snapshot_1?wait_for_completion=true
-curl -s -H "Content-Type: application/json" XPOST localhost:49200/_aliases -d '{ "actions" : [ { "add" : { "index" : "integration_test", "alias" : "integration_test_alias" } } ] }'
-
-# Run tests
-go test -v github.com/github/vulcanizer/... -tags integration -count=1
-
-docker-compose -f elasticsearch-cluster-compose.yml down
+
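Because the per-version steps are now identical, one leg of the suite can also be run by hand with the same commands the loop executes; a sketch for v7 only (the _aliases call in the script appears to pass XPOST without the leading dash, a quirk carried over unchanged from the old script; the sketch spells it -XPOST):

    cd integration_data/v7
    docker-compose -f elasticsearch-cluster-compose.yml up -d
    until curl -s localhost:49200 > /dev/null; do sleep 10; done

    # Seed the cluster the same way the loop body does
    curl -s -H "Content-Type: application/json" -XPUT localhost:49200/integration_test -d '{ "settings": { "number_of_shards": 2 } }'
    curl -s -H "Content-Type: application/x-ndjson" -XPOST localhost:49200/_bulk --data-binary "@../documents.dat"; echo
    curl -s -H "Content-Type: application/json" -XPUT localhost:49200/_snapshot/backup-repo -d '{ "type": "fs", "settings": { "location": "/backups" } }'
    curl -s -XPUT "localhost:49200/_snapshot/backup-repo/snapshot_1?wait_for_completion=true"
    curl -s -H "Content-Type: application/json" -XPOST localhost:49200/_aliases -d '{ "actions" : [ { "add" : { "index" : "integration_test", "alias" : "integration_test_alias" } } ] }'

    # Run the integration tests against the running cluster, then clean up
    go test -v github.com/github/vulcanizer/... -tags integration -count=1
    docker-compose -f elasticsearch-cluster-compose.yml down
    cd ../../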