Run CI/local envs with ES 7 and update client libs (#19441)

* Run CI/local envs with ES 7 and update client libs

Also drive-by update CI to have the right versions of MySQL/Redis

* Remove deprecated, no longer necessary send_get_body_as parameter

* Fix deprecation warnings

* Update tests

* Unused import

* More deprecation fixes
Mathieu Pillard 2022-07-11 11:35:18 +02:00 committed by GitHub
Parent: fe26fbe540
Commit: 97ba20c9a2
No known key found for this signature
GPG key ID: 4AEE18F83AFDEB23
12 changed files: 54 additions and 156 deletions


@@ -284,9 +284,9 @@ references:
docker-images:
# These versions should try to match what we run in production.
- &python "cimg/python:3.9"
- &redis "redis:2.8"
- &redis "redis:6.2"
- &memcached "memcached:1.4"
- &mysql "mysql:8.0.21"
- &mysql "mysql:8.0.28"
mysql-config: &mysql-config
environment:
@@ -314,16 +314,13 @@ references:
defaults-with-elasticsearch: &defaults-with-elasticsearch
<<: *defaults
parameters:
elasticsearchversion:
type: string
docker:
- image: *python
- image: *redis
- image: *memcached
- image: *mysql
<<: *mysql-config
- image: docker.elastic.co/elasticsearch/elasticsearch:<< parameters.elasticsearchversion >>
- image: docker.elastic.co/elasticsearch/elasticsearch:7.17.3
environment:
# Disable all xpack related features to avoid unrelated logging in
# docker logs. https://github.com/mozilla/addons-server/issues/8887
@@ -660,12 +657,7 @@ workflows:
- docs
- main
- reviewers-and-zadmin
- es-tests:
matrix:
parameters:
elasticsearchversion:
- 6.8.23
- 7.17.2
- es-tests
- release-master:
filters:
branches:

.github/dependabot.yml (vendored)

@@ -17,10 +17,10 @@ updates:
- ">= 6"
- dependency-name: elasticsearch
versions:
- ">= 7"
- ">= 8"
- dependency-name: elasticsearch-dsl
versions:
- ">= 7"
- ">= 8"
- dependency-name: kombu
versions:
- ">= 6"


@@ -63,7 +63,7 @@ services:
- MYSQL_DATABASE=olympia
elasticsearch:
image: docker.elastic.co/elasticsearch/elasticsearch:6.8.23
image: docker.elastic.co/elasticsearch/elasticsearch:7.17.3
environment:
# Disable all xpack related features to avoid unrelated logging
# in docker logs. https://github.com/mozilla/addons-server/issues/8887


@@ -329,12 +329,12 @@ drf-nested-routers==0.93.4 \
--hash=sha256:996b77f3f4dfaf64569e7b8f04e3919945f90f95366838ca5b8bed9dd709d6c5 \
--hash=sha256:01aa556b8c08608bb74fb34f6ca065a5183f2cda4dc0478192cc17a2581d71b0
# elasticsearch is required by elasticsearch-dsl
elasticsearch==6.8.2 \
--hash=sha256:1aedf00b73f5d1e77cb4df70fec58f2efb664be4ce2686374239aa6c0373c65c \
--hash=sha256:c3a560bb83e4981b5a5c82080d2ceb99686d33692ef53365656129478aa5ddb2
elasticsearch-dsl==6.4.0 \
--hash=sha256:f60aea7fd756ac1fbe7ce114bbf4949aefbf495dfe8896640e787c67344f12f6 \
--hash=sha256:26416f4dd46ceca43d62ef74970d9de4bdd6f4b0f163316f0b432c9e61a08bec
elasticsearch==7.17.4 \
--hash=sha256:540d646908761120926850e98230ffc08fee9a3f9b45073d42de08246f598652 \
--hash=sha256:81c1d15b0f9382b2dc40a896c634f8de09bfcd5079e19a8412940a9ddfbde64b
elasticsearch-dsl==7.4.0 \
--hash=sha256:046ea10820b94c075081b528b4526c5bc776bda4226d702f269a5f203232064b \
--hash=sha256:c4a7b93882918a413b63bed54018a1685d7410ffd8facbc860ee7fd57f214a6d
email-reply-parser==0.5.12 \
--hash=sha256:d051cfa8f54046f7399553aae57f17f62bf1652f83e3e9970fd109c0f24a880c \
--hash=sha256:3499c02284679e020acf8aa30ef9e43c62f9ab5ccee0a35bfac85a0c1fa685fd \


@@ -1,5 +1,3 @@
import copy
from django.conf import settings
from olympia.constants.promoted import RECOMMENDED
@@ -728,17 +726,15 @@ class AddonIndexer:
Intended to be used by reindexation (and tests), generally a bad idea
to call manually.
"""
index_settings = copy.deepcopy(cls.index_settings)
config = {
'mappings': cls.get_mapping(),
'settings': {
# create_index will add its own index settings like number of
create_index(
index=index_name,
mappings=cls.get_mapping(),
index_settings={
# create_index() will add its own index settings like number of
# shards and replicas.
'index': index_settings
'index': cls.index_settings
},
}
create_index(index_name, config)
)
@classmethod
def reindex_tasks_group(cls, index_name):
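
The dropped copy.deepcopy() is safe because the reworked create_index() (see
olympia/lib/es/utils.py below) now deep-copies whatever settings dict it
receives before merging in shard/replica counts. A minimal sketch of that
contract, using a hypothetical indexer class and assuming a reachable cluster:

    # Sketch only: create_index() is the updated helper from olympia.lib.es.utils.
    from olympia.lib.es.utils import create_index

    class SketchIndexer:
        # Class-level dict shared by every caller; must never be mutated.
        index_settings = {'analysis': {'analyzer': {'default': {'type': 'standard'}}}}

    create_index(
        index='sketch-index',
        mappings={},
        index_settings={'index': SketchIndexer.index_settings},
    )
    # create_index() deep-copied the dict before adding number_of_shards etc.,
    # so the shared class attribute is untouched:
    assert 'number_of_shards' not in SketchIndexer.index_settings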


@@ -4083,7 +4083,7 @@ class TestAddonSearchView(ESTestCase):
response = qset.execute()
source_keys = response.hits.hits[0]['_source'].keys()
source_keys = response.hits.hits[0]['_source'].to_dict().keys()
assert not any(
key in source_keys
@@ -5170,7 +5170,7 @@ class TestAddonAutoCompleteSearchView(ESTestCase):
# Sort by type to avoid sorting problems before picking the
# first result. (We have a theme and an add-on)
hit = sorted(response.hits.hits, key=lambda x: x['_source']['type'])
assert set(hit[1]['_source'].keys()) == includes
assert set(hit[1]['_source'].to_dict().keys()) == includes
def test_no_unlisted(self):
addon_factory(
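
The added .to_dict() calls appear to be needed because elasticsearch-dsl 7.x
wraps raw hit payloads in its AttrDict helper, which exposes fields as
attributes rather than dict methods. A minimal sketch, assuming
elasticsearch_dsl.utils.AttrDict behaves as described:

    from elasticsearch_dsl.utils import AttrDict

    source = AttrDict({'id': 1, 'type': 'extension'})
    assert source.id == 1  # attribute-style access
    # Dict introspection goes through the plain dict returned by to_dict():
    assert set(source.to_dict().keys()) == {'id', 'type'}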


@@ -5,6 +5,4 @@ from elasticsearch import Elasticsearch
def get_es():
"""Create an ES object and return it."""
return Elasticsearch(
settings.ES_HOSTS, timeout=settings.ES_TIMEOUT, send_get_body_as='POST'
)
return Elasticsearch(settings.ES_HOSTS, timeout=settings.ES_TIMEOUT)
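
send_get_body_as='POST' was a workaround for intermediaries that strip bodies
from GET requests; the 7.x client deprecates the parameter and, per the commit
message, it is no longer necessary. A minimal construction sketch with
hypothetical host and timeout values:

    from elasticsearch import Elasticsearch

    # Plain 7.x construction: just hosts and a timeout.
    es = Elasticsearch(['http://localhost:9200'], timeout=20)
    print(es.info()['version']['number'])  # e.g. '7.17.3'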


@@ -978,8 +978,8 @@ class ESTestCaseMixin:
if key.startswith('test_'):
if cls.es.indices.exists_alias(name=key):
cls.es.indices.delete_alias(index='*', name=key, ignore=[404])
elif cls.es.indices.exists(key):
cls.es.indices.delete(key, ignore=[404])
elif cls.es.indices.exists(index=key):
cls.es.indices.delete(index=key, ignore=[404])
# Figure out the name of the indices we're going to create from the
# suffixes generated at import time. Like the aliases later, the name
@@ -1005,7 +1005,7 @@ class ESTestCaseMixin:
},
]
cls.es.indices.update_aliases({'actions': actions})
cls.es.indices.update_aliases(body={'actions': actions})
super().setUpClass()
def setUp(self):
@@ -1016,21 +1016,21 @@ class ESTestCaseMixin:
@classmethod
def refresh(cls, index='default'):
cls.es.indices.refresh(settings.ES_INDEXES.get(index, index))
cls.es.indices.refresh(index=settings.ES_INDEXES.get(index, index))
@classmethod
def reindex(cls, model, index='default'):
# Emit post-save signal so all of the objects get reindexed.
manager = getattr(model, 'unfiltered', model.objects)
[post_save.send(model, instance=o, created=False) for o in manager.all()]
cls.refresh(index)
cls.refresh(index=index)
@classmethod
def empty_index(cls, index):
# Try to make sure that all changes are properly flushed.
cls.refresh(index)
cls.refresh(index=index)
cls.es.delete_by_query(
settings.ES_INDEXES[index],
index=settings.ES_INDEXES[index],
body={'query': {'match_all': {}}},
conflicts='proceed',
)
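
The common thread in these test-mixin changes: the 7.x python client emits
DeprecationWarnings for positional API arguments (they are removed in 8.x), so
every call now names index= and body= explicitly. A minimal sketch of the
touched calls, with a hypothetical index name:

    from elasticsearch import Elasticsearch

    es = Elasticsearch(['http://localhost:9200'])
    es.indices.refresh(index='test_amo_addons')
    es.indices.delete(index='test_amo_addons', ignore=[404])
    es.delete_by_query(
        index='test_amo_addons',
        body={'query': {'match_all': {}}},
        conflicts='proceed',
    )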


@@ -38,13 +38,13 @@ def get_indexer(alias):
def delete_indexes(indexes):
indices = ','.join(indexes)
logger.info('Removing indices %r' % indices)
ES.indices.delete(indices, ignore=[404, 500])
ES.indices.delete(index=indices, ignore=[404, 500])
@task
def update_aliases(actions):
logger.info('Rebuilding aliases with actions: %s' % actions)
ES.indices.update_aliases({'actions': actions})
ES.indices.update_aliases(body={'actions': actions})
@task(ignore_result=False)
@@ -162,8 +162,8 @@ class Command(BaseCommand):
# happens if data was indexed before the first reindex was
# done) doesn't matter.
try:
index = next(iter(ES.indices.get(alias)))
ES.indices.delete(index)
index = next(iter(ES.indices.get(index=alias)))
ES.indices.delete(index=index)
except NotFoundError:
pass
else:
@@ -190,7 +190,7 @@ class Command(BaseCommand):
old_index = None
try:
olds = ES.indices.get_alias(alias)
olds = ES.indices.get_alias(index=alias)
for old_index in olds:
# Mark the index to be removed later.
to_remove.append(old_index)
@@ -208,7 +208,7 @@ class Command(BaseCommand):
# If old_index is None that could mean it's a full index.
# In that case we want to continue index in it.
if ES.indices.exists(alias):
if ES.indices.exists(index=alias):
old_index = alias
# Main chain for this alias that:


@@ -48,7 +48,7 @@ class TestIndexCommand(ESTestCaseMixin, PatchMixin, TransactionTestCase):
current_indices = self.es.indices.stats()['indices'].keys()
for index in current_indices:
if index not in self.indices:
self.es.indices.delete(index, ignore=404)
self.es.indices.delete(index=index, ignore=404)
super().tearDown()
@classmethod
@@ -64,7 +64,7 @@ class TestIndexCommand(ESTestCaseMixin, PatchMixin, TransactionTestCase):
"""Make sure the indices settings are properly set."""
for index, alias in new_indices:
settings = self.es.indices.get_settings(alias)[index]['settings']
settings = self.es.indices.get_settings(index=alias)[index]['settings']
# These should be set in settings_test.
assert int(settings['index']['number_of_replicas']) == 0


@@ -3,43 +3,12 @@ from unittest import mock
from olympia.addons.indexers import AddonIndexer
from olympia.amo.tests import addon_factory, TestCase
from olympia.lib.es.models import Reindexing
from olympia.lib.es.utils import index_objects, get_major_version
from olympia.lib.es.utils import index_objects
@mock.patch('olympia.lib.es.utils.get_major_version')
@mock.patch('olympia.lib.es.utils.helpers')
class TestIndexObjects(TestCase):
def test_index_objects(self, helpers_mock, get_major_version_mock):
get_major_version_mock.return_value = 6
addon1 = addon_factory()
addon2 = addon_factory()
fake_extract = {
addon1.pk: mock.Mock(),
addon2.pk: mock.Mock(),
}
with mock.patch.object(
AddonIndexer, 'extract_document', lambda a: fake_extract[a.pk]
):
index_objects(ids=[addon1.pk, addon2.pk], indexer_class=AddonIndexer)
bulk_mock = helpers_mock.bulk
assert bulk_mock.call_count == 1
assert bulk_mock.call_args[0][1] == [
{
'_source': fake_extract[addon1.pk],
'_id': addon1.pk,
'_index': 'test_amo_addons',
'_type': 'addons',
},
{
'_source': fake_extract[addon2.pk],
'_id': addon2.pk,
'_index': 'test_amo_addons',
'_type': 'addons',
},
]
def test_index_objects_elasticsearch_7(self, helpers_mock, get_major_version_mock):
get_major_version_mock.return_value = 7
def test_index_objects(self, helpers_mock):
addon1 = addon_factory()
addon2 = addon_factory()
fake_extract = {
@@ -65,9 +34,8 @@ class TestIndexObjects(TestCase):
},
]
def test_index_objects_with_index(self, helpers_mock, get_major_version_mock):
def test_index_objects_with_index(self, helpers_mock):
target_index = 'amazing_index'
get_major_version_mock.return_value = 6
addon1 = addon_factory()
addon2 = addon_factory()
fake_extract = {
@@ -89,22 +57,19 @@ class TestIndexObjects(TestCase):
'_source': fake_extract[addon1.pk],
'_id': addon1.pk,
'_index': target_index,
'_type': 'addons',
},
{
'_source': fake_extract[addon2.pk],
'_id': addon2.pk,
'_index': target_index,
'_type': 'addons',
},
]
def test_index_objects_while_reindexing(self, helpers_mock, get_major_version_mock):
def test_index_objects_while_reindexing(self, helpers_mock):
target_index = AddonIndexer.get_index_alias() # the default index
Reindexing.objects.create(
alias=target_index, old_index='old_index', new_index='new_index'
)
get_major_version_mock.return_value = 6
addon1 = addon_factory()
addon2 = addon_factory()
fake_extract = {
@@ -128,36 +93,29 @@ class TestIndexObjects(TestCase):
'_source': fake_extract[addon1.pk],
'_id': addon1.pk,
'_index': 'new_index',
'_type': 'addons',
},
{
'_source': fake_extract[addon1.pk],
'_id': addon1.pk,
'_index': 'old_index',
'_type': 'addons',
},
{
'_source': fake_extract[addon2.pk],
'_id': addon2.pk,
'_index': 'new_index',
'_type': 'addons',
},
{
'_source': fake_extract[addon2.pk],
'_id': addon2.pk,
'_index': 'old_index',
'_type': 'addons',
},
]
def test_index_objects_with_index_while_reindexing(
self, helpers_mock, get_major_version_mock
):
def test_index_objects_with_index_while_reindexing(self, helpers_mock):
target_index = 'amazing_index'
Reindexing.objects.create(
alias=target_index, old_index='old_index', new_index='new_index'
)
get_major_version_mock.return_value = 6
addon1 = addon_factory()
addon2 = addon_factory()
fake_extract = {
@@ -181,36 +139,10 @@ class TestIndexObjects(TestCase):
'_source': fake_extract[addon1.pk],
'_id': addon1.pk,
'_index': 'amazing_index',
'_type': 'addons',
},
{
'_source': fake_extract[addon2.pk],
'_id': addon2.pk,
'_index': 'amazing_index',
'_type': 'addons',
},
]
def test_get_major_version():
info = {
'name': '_w5frMV',
'cluster_name': 'docker-cluster',
'cluster_uuid': 'SGCal9MVRN6JKOQptxOQJA',
'version': {
'number': '6.8.23',
'build_flavor': 'default',
'build_type': 'docker',
'build_hash': '4f67856',
'build_date': '2022-01-06T21:30:50.087716Z',
'build_snapshot': False,
'lucene_version': '7.7.3',
'minimum_wire_compatibility_version': '5.6.0',
'minimum_index_compatibility_version': '5.0.0',
},
'tagline': 'You Know, for Search',
}
get_major_version(mock.Mock(info=lambda: info)) == 6
info['version']['number'] = '7.17.2'
get_major_version(mock.Mock(info=lambda: info)) == 7


@@ -18,10 +18,6 @@ from .models import Reindexing
log = olympia.core.logger.getLogger('z.es')
def get_major_version(es):
return int(es.info()['version']['number'].split('.')[0])
def index_objects(
*, ids, indexer_class, index=None, transforms=None, manager_name=None
):
@@ -64,7 +60,6 @@ def index_objects(
bulk = []
es = amo_search.get_es()
major_version = get_major_version(es)
for obj in qs.order_by('pk'):
data = indexer_class.extract_document(obj)
for index in indices:
@@ -73,13 +68,6 @@ def index_objects(
'_id': obj.id,
'_index': index,
}
if major_version < 7:
# While on 6.x, we use the `addons` type when creating indices
# and when bulk-indexing. We completely ignore it on searches.
# When on 7.x, we don't pass type at all at creation or
# indexing, and continue to ignore it on searches.
# That should ensure we're compatible with both transparently.
item['_type'] = 'addons'
bulk.append(item)
return helpers.bulk(es, bulk)
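
With mapping types removed in Elasticsearch 7, a bulk action now carries only
source, id and index, which is why the _type branch above could go. A minimal
sketch of the resulting action shape, with hypothetical values:

    from elasticsearch import Elasticsearch, helpers

    es = Elasticsearch(['http://localhost:9200'])
    actions = [
        # No '_type' key: ES 7 indices hold a single implicit document type.
        {'_source': {'name': 'some-addon'}, '_id': 42, '_index': 'addons'},
    ]
    helpers.bulk(es, actions)
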
@@ -105,7 +93,7 @@ def timestamp_index(index):
return '{}-{}'.format(index, datetime.datetime.now().strftime('%Y%m%d%H%M%S'))
def create_index(index, config=None):
def create_index(*, index, mappings, index_settings=None):
"""Create an index if it's not present.
Return the index name.
@@ -113,34 +101,26 @@ def create_index(index, config=None):
Options:
- index: name of the index.
- config: if provided, used when passing the configuration of the index to
ES.
- mappings and index_settings: if provided, used when passing the
configuration of the index to ES.
"""
es = amo_search.get_es()
if config is None:
config = {}
if 'settings' not in config:
config['settings'] = {'index': {}}
if index_settings is None:
index_settings = {'index': {}}
else:
# Make a deepcopy of the settings in the config that was passed, so
# that we can modify it freely to add shards and replicas settings.
config['settings'] = deepcopy(config['settings'])
# Make a deepcopy of the settings that was passed, so that we can
# modify it freely to add shards and replicas settings.
index_settings = deepcopy(index_settings)
config['settings']['index'].update(
index_settings['index'].update(
{
'number_of_shards': settings.ES_DEFAULT_NUM_SHARDS,
'number_of_replicas': settings.ES_DEFAULT_NUM_REPLICAS,
'max_result_window': settings.ES_MAX_RESULT_WINDOW,
}
)
major_version = get_major_version(es)
if not es.indices.exists(index):
# See above, while on 6.x the mapping needs to include the `addons` doc
# type.
if major_version < 7:
config['mappings'] = {'addons': config['mappings']}
es.indices.create(index, body=config)
if not es.indices.exists(index=index):
es.indices.create(index=index, mappings=mappings, settings=index_settings)
return index
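
For illustration, a hypothetical caller of the reworked helper; shard, replica
and max_result_window values are merged in from Django settings, so callers
only pass what is specific to their index:

    from olympia.addons.indexers import AddonIndexer
    from olympia.lib.es.utils import create_index, timestamp_index

    index_name = timestamp_index('addons')  # e.g. 'addons-20220711113518'
    create_index(
        index=index_name,
        mappings=AddonIndexer.get_mapping(),
        index_settings={'index': {'refresh_interval': '5s'}},
    )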