"""PyTest configuration."""

from google.cloud import bigquery
from google.cloud import storage
import os
import pytest
import random
import string

TEST_BUCKET = "bigquery-etl-integration-test-bucket"

pytest_plugins = [
    "bigquery_etl.pytest_plugin.sql",
    "bigquery_etl.pytest_plugin.udf",
    "bigquery_etl.pytest_plugin.script_lint.black",
    "bigquery_etl.pytest_plugin.script_lint.docstyle",
    "bigquery_etl.pytest_plugin.script_lint.flake8",
    "bigquery_etl.pytest_plugin.script_lint.mypy",
]


def pytest_collection_modifyitems(config, items):
    """Skip tests marked `integration` unless they are explicitly selected."""
    keywordexpr = config.option.keyword
    markexpr = config.option.markexpr
    if keywordexpr or markexpr:
        # an explicit -k or -m expression takes over test selection
        return

    skip_integration = pytest.mark.skip(reason="integration marker not selected")

    for item in items:
        if "integration" in item.keywords:
            item.add_marker(skip_integration)
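

# Hypothetical usage sketch (the test name is an assumption, not part of this
# file): a test opts in by carrying the integration marker, and such tests run
# only when selected explicitly, e.g. with `pytest -m integration`.
#
#     @pytest.mark.integration
#     def test_something(project_id):
#         assert project_id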


@pytest.fixture
def project_id():
    """Provide a BigQuery project ID."""
    # GOOGLE_PROJECT_ID needs to be set for integration tests to run
    project_id = os.environ["GOOGLE_PROJECT_ID"]
    return project_id


@pytest.fixture
def bigquery_client():
    """Provide a BigQuery client."""
    project_id = os.environ["GOOGLE_PROJECT_ID"]
    return bigquery.Client(project_id)
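

# Hypothetical usage sketch (an assumption for illustration): the client above
# is pinned to GOOGLE_PROJECT_ID, so both fixtures agree on the project.
#
#     @pytest.mark.integration
#     def test_client_project(bigquery_client, project_id):
#         assert bigquery_client.project == project_id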


@pytest.fixture
def temporary_dataset():
    """Provide a randomly named temporary BigQuery dataset."""
    # generate a random dataset name to avoid conflicts when running tests in parallel
    test_dataset = "test_" + "".join(
        random.choice(string.ascii_lowercase) for _ in range(12)
    )

    project_id = os.environ["GOOGLE_PROJECT_ID"]
    client = bigquery.Client(project_id)
    client.create_dataset(test_dataset)

    yield test_dataset

    # clean up by removing the temporary dataset and its contents
    client.delete_dataset(test_dataset, delete_contents=True, not_found_ok=True)
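

# Hypothetical usage sketch (the test name is an assumption): the fixture
# yields the dataset ID, which resolves against the client's default project.
#
#     @pytest.mark.integration
#     def test_dataset_exists(bigquery_client, temporary_dataset):
#         assert bigquery_client.get_dataset(temporary_dataset) is not None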


@pytest.fixture
def test_bucket():
    """Provide a test bucket instance."""
    storage_client = storage.Client()
    bucket = storage_client.bucket(TEST_BUCKET)

    yield bucket

    # clean up the test bucket by deleting all blobs
    bucket.delete_blobs(bucket.list_blobs())
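

# Hypothetical usage sketch (the blob name is an assumption): anything a test
# uploads here is deleted again by the fixture's cleanup step above.
#
#     @pytest.mark.integration
#     def test_upload(test_bucket):
#         blob = test_bucket.blob("tmp/example.txt")
#         blob.upload_from_string("hello")
#         assert blob.exists()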


@pytest.fixture
def storage_client():
    """Provide a client instance for Cloud Storage."""
    yield storage.Client()