Mirror of https://github.com/mozilla/bugbug.git
Misc fixes for the HTTP service and the integration tests (#1332)
* Simplify handling of the HTTP service directory where models are downloaded, and correct the installation of the http_service package in the integration test
* Log each HTTP worker boot step and allow a missing DB
* Retry for 10 minutes to allow the worker boot to finish
* Add more logging and wait more time
* Wait 30 seconds between requests as a workaround for https://github.com/mozilla/bugbug/issues/1340

Co-authored-by: Marco Castelluccio <mcastelluccio@mozilla.com>
This commit is contained in:
Parent
58bd844553
Commit
791c16ebe0
@@ -3,29 +3,39 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this file,
 # You can obtain one at http://mozilla.org/MPL/2.0/.
 
+import logging
 import os
 import tempfile
 
 from bugbug import db, repository, test_scheduling
 
+logger = logging.getLogger(__name__)
+
 
 def boot_worker():
 
     # Clone mozilla central
     repo_dir = os.path.join(tempfile.gettempdir(), "bugbug-hg")
+    logger.info(f"Cloning mozilla-central in {repo_dir}...")
     repository.clone(repo_dir)
 
     # Download databases
-    assert db.download_support_file(
+    logger.info("Downloading test scheduling DB support file...")
+    db.download_support_file(
         test_scheduling.TEST_LABEL_SCHEDULING_DB, test_scheduling.PAST_FAILURES_LABEL_DB
     )
 
     # Download commits DB
+    logger.info("Downloading commits DB...")
     assert db.download(repository.COMMITS_DB, support_files_too=True)
 
     # And update it
+    logger.info("Browsing all commits...")
     for commit in repository.get_commits():
         pass
 
     rev_start = "children({})".format(commit["node"])
+    logger.info("Updating commits DB...")
     repository.download_commits(repo_dir, rev_start)
+
+    logger.info("Worker boot done")
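The `for commit in repository.get_commits(): pass` loop above only exists to bind `commit` to the last entry of the commits DB; that commit's node then seeds the `children(...)` Mercurial revset so only newer commits are downloaded. A minimal sketch of the same idiom, assuming commit entries are dicts with a "node" key as in the diff; the helper and the sample data here are hypothetical, not part of the patch:

# Sketch: get the node of the last commit yielded by an iterator.
from collections import deque

def last_commit_node(commits):
    """Return the "node" of the last commit in an iterator, or None if it is empty."""
    last = deque(commits, maxlen=1)  # keeps only the final yielded item
    return last[0]["node"] if last else None

# Plain dicts standing in for bugbug commit entries:
commits = iter([{"node": "abc123"}, {"node": "def456"}])
rev_start = "children({})".format(last_commit_node(commits))  # revset: children of the last known commit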
@@ -27,7 +27,7 @@ MODELS_NAMES = [
     "stepstoreproduce",
     "spambug",
 ]
-MODELS_DIR = os.path.join(os.path.dirname(__file__), "models")
+MODELS_DIR = "models"
 BASE_URL = "https://community-tc.services.mozilla.com/api/index/v1/task/project.relman.bugbug.train_{}.latest/artifacts/public"
 DEFAULT_EXPIRATION_TTL = 7 * 24 * 3600  # A week
 
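The change above swaps a module-relative path for a plain relative one, so models are now looked up under whatever directory the HTTP service (or the integration test) runs in. A small sketch of the difference, using a hypothetical model file name for illustration:

import os

# Old: resolved next to the module that defines MODELS_DIR.
MODELS_DIR_OLD = os.path.join(os.path.dirname(os.path.abspath(__file__)), "models")
# New: resolved against the current working directory of the process.
MODELS_DIR_NEW = "models"

model_file = "defectenhancementtaskmodel"  # hypothetical file name, for illustration only
print(os.path.abspath(os.path.join(MODELS_DIR_OLD, model_file)))
print(os.path.abspath(os.path.join(MODELS_DIR_NEW, model_file)))  # depends on os.getcwd()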
@@ -15,16 +15,18 @@ def integration_test():
     # First try to classify a single bug
     single_bug_url = f"{BUGBUG_HTTP_SERVER}/defectenhancementtask/predict/1376406"
     response = None
-    for i in range(100):
-        response = requests.get(single_bug_url, headers={"X-Api-Key": "Test"})
+    for i in range(900):
+        response = requests.get(
+            single_bug_url, headers={"X-Api-Key": "integration_test_single"}
+        )
 
         if response.status_code == 200:
             break
 
-        time.sleep(1)
+        time.sleep(30)
 
     if not response:
-        raise Exception("Couldn't get an answer in 100 seconds")
+        raise Exception("Couldn't get an answer in 900 seconds")
 
     response_json = response.json()
     print("Response for bug 1376406", response_json)
@@ -36,13 +38,15 @@ def integration_test():
     response = None
     for i in range(100):
         response = requests.post(
-            batch_url, headers={"X-Api-Key": "Test"}, json={"bugs": bug_ids}
+            batch_url,
+            headers={"X-Api-Key": "integration_test_batch"},
+            json={"bugs": bug_ids},
         )
 
         if response.status_code == 200:
             break
 
-        time.sleep(1)
+        time.sleep(30)
 
     if not response:
         raise Exception("Couldn't get an answer in 100 seconds")
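Both retry loops in the integration test follow the same poll-until-ready shape: issue the request, stop on HTTP 200, otherwise sleep 30 seconds and try again so the worker boot has time to finish. A generic sketch of that pattern, assuming the same requests and time modules; the helper name and its parameters are illustrative, not part of the patch:

import time
import requests

def wait_for_http_200(method, url, attempts=900, delay=30, **kwargs):
    """Poll `url` until it answers 200, sleeping `delay` seconds between tries."""
    response = None
    for _ in range(attempts):
        response = requests.request(method, url, **kwargs)
        if response.status_code == 200:
            return response
        time.sleep(delay)
    raise Exception(f"Couldn't get an answer after {attempts} attempts")

# Hypothetical usage mirroring the single-bug request in the test:
# response = wait_for_http_200(
#     "GET",
#     f"{BUGBUG_HTTP_SERVER}/defectenhancementtask/predict/1376406",
#     headers={"X-Api-Key": "integration_test_single"},
# )

Raising after the final attempt mirrors the test's behaviour of failing loudly rather than hanging forever.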
@@ -6,7 +6,8 @@ set -euox pipefail
 
 # Supposed to be run from the repository root directory
 
-cd http_service/models/;
+mkdir -p models
+cd models
 
 # Remove the models and any old data
 rm defectenhancementtaskmodel* || true;
@@ -41,10 +42,9 @@ bugbug-train --limit 30000 --no-download backout
 # This part duplicates the http service Dockerfiles because we cannot easily spin Docker containers
 # up on Taskcluster
 cd ../
-pip install -r requirements.txt
-cd ../
+pip install ./http_service
 pwd
-ls http_service/models/
+ls models
 
 export REDIS_URL=redis://localhost:6379/4
 