Pretty print dataset metadata
Parent: bffcf3fc45
Commit: 8af044d844
@@ -151,7 +151,7 @@ def publish_all_datasets_metadata(table_metadata, output_file):
     logging.info(f"Write metadata to {output_file}")

     with smart_open.open(output_file, "w") as fout:
-        fout.write(json.dumps(metadata_json))
+        fout.write(json.dumps(metadata_json, indent=4))


 def publish_table_metadata(table_metadata, bucket):
@@ -161,7 +161,7 @@ def publish_table_metadata(table_metadata, bucket):

         logging.info(f"Write metadata to {output_file}")
         with smart_open.open(output_file, "w") as fout:
-            fout.write(json.dumps(metadata.files_metadata_to_json()))
+            fout.write(json.dumps(metadata.files_metadata_to_json(), indent=4))


 def main():
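As background on the two hunks above (not part of the diff itself), a minimal sketch of how the indent argument changes what json.dumps writes; the payload here is a made-up stand-in for the real metadata dictionary:

import json

# Illustrative stand-in for the dataset metadata structure.
metadata_json = {"test": {"incremental_query": {"v1": {"incremental": True}}}}

# Without indent, the serialization is a single compact line:
# {"test": {"incremental_query": {"v1": {"incremental": true}}}}
print(json.dumps(metadata_json))

# With indent=4, every nesting level goes on its own line, indented by
# four spaces, which is the format the published metadata files now use.
print(json.dumps(metadata_json, indent=4))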
@@ -189,12 +189,8 @@ def main():
         publish_table_metadata(gcs_table_metadata, args.target_bucket)
     else:
         print(
-            """
-            Invalid target: {}, target must be a directory with
-            structure /<dataset>/<table>/metadata.yaml.
-            """.format(
-                args.target
-            )
+            f"Invalid target: {args.target}, target must be a directory with"
+            "structure /<dataset>/<table>/metadata.yaml."
         )

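The rewritten print call above relies on Python's implicit concatenation of adjacent string literals; a short standalone sketch of that behaviour (the target value below is purely illustrative):

# Adjacent string literals are merged into one string at compile time,
# so any separating whitespace must be written inside the literals themselves.
target = "/tmp/not-a-directory"  # illustrative value, not from the script
message = (
    f"Invalid target: {target}, target must be a directory with "
    "structure /<dataset>/<table>/metadata.yaml."
)
print(message)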
@@ -33,12 +33,6 @@ elif [ "$1" = "query" ]; then
   # For queries with public_json set in the metadata, the results will be
   # exported as JSON to Cloud storage.
   python script/publish_public_data_json --query_file="${@: -1}" ${@:1:$#-1}
-elif [ "$1" = "publish_gcs_metadata" ]; then
-  # For an invocation like:
-  #   publish_gcs_metadata [options]
-  # we dispatch to a script that publishes metadata files to GCS for datasets
-  # that have previsously been made public on GCS.
-  exec script/publish_public_data_gcs_metadata ${@:2}
 elif [ "$XCOM_PUSH" = "true" ]; then
   # KubernetesPodOperator will extract the contents of /airflow/xcom/return.json as an xcom
   # if the xcom_push parameter is true
@@ -1 +1,23 @@
-{"test": {"non_incremental_query": {"v1": {"friendly_name": "Test table for a non-incremental query", "description": "Test table for a non-incremental query", "incremental": false, "incremental_export": false, "review_link": "https://bugzilla.mozilla.org/show_bug.cgi?id=1999999", "files_uri": "http://test.endpoint.mozilla.com/api/v1/tables/test/non_incremental_query/v1/files"}}, "incremental_query": {"v1": {"friendly_name": "Test table for an incremental query", "description": "Test table for an incremental query", "incremental": true, "incremental_export": true, "files_uri": "http://test.endpoint.mozilla.com/api/v1/tables/test/incremental_query/v1/files"}}}}
+{
+    "test": {
+        "non_incremental_query": {
+            "v1": {
+                "friendly_name": "Test table for a non-incremental query",
+                "description": "Test table for a non-incremental query",
+                "incremental": false,
+                "incremental_export": false,
+                "review_link": "https://bugzilla.mozilla.org/show_bug.cgi?id=1999999",
+                "files_uri": "https://test.endpoint.mozilla.com/api/v1/tables/test/non_incremental_query/v1/files"
+            }
+        },
+        "incremental_query": {
+            "v1": {
+                "friendly_name": "Test table for an incremental query",
+                "description": "Test table for an incremental query",
+                "incremental": true,
+                "incremental_export": true,
+                "files_uri": "https://test.endpoint.mozilla.com/api/v1/tables/test/incremental_query/v1/files"
+            }
+        }
+    }
+}
@@ -1 +1,5 @@
-{"2020-03-15": ["http://test.endpoint.mozilla.com/api/v1/tables/test/incremental_query/v1/files/2020-03-15/000000000001.json.gz"]}
+{
+    "2020-03-15": [
+        "https://test.endpoint.mozilla.com/api/v1/tables/test/incremental_query/v1/files/2020-03-15/000000000001.json.gz"
+    ]
+}
@@ -1 +1,3 @@
-["http://test.endpoint.mozilla.com/api/v1/tables/test/non_incremental_query/v1/files/000000000000.json.gz"]
+[
+    "https://test.endpoint.mozilla.com/api/v1/tables/test/non_incremental_query/v1/files/000000000000.json.gz"
+]
@@ -11,7 +11,7 @@ class TestPublishGcsMetadata(object):
     test_bucket = "test-bucket"
     project_id = "test-project-id"
     api_version = "v1"
-    endpoint = "http://test.endpoint.mozilla.com/"
+    endpoint = "https://test.endpoint.mozilla.com/"
     sql_dir = "tests/public_data/test_sql/"

     mock_blob1 = Mock()
@@ -194,7 +194,7 @@ class TestPublishGcsMetadata(object):

         with open("tests/public_data/all_datasets.json") as f:
             expected_json = json.load(f)
-            mock_out.write.assert_called_with(json.dumps(expected_json))
+            mock_out.write.assert_called_with(json.dumps(expected_json, indent=4))

     def test_publish_table_metadata(self):
         files1 = [
@@ -226,7 +226,7 @@ class TestPublishGcsMetadata(object):

         mock_out.write.assert_has_calls(
             [
-                call(json.dumps(expected_non_incremental_query_json)),
-                call(json.dumps(expected_incremental_query_json)),
+                call(json.dumps(expected_non_incremental_query_json, indent=4)),
+                call(json.dumps(expected_incremental_query_json, indent=4)),
             ]
         )
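For reference, a small self-contained sketch of the Mock assertion pattern the tests above use (the names and payload are illustrative, not taken from the test suite):

import json
from unittest.mock import Mock, call

expected = {"incremental": True}  # illustrative expected payload

mock_out = Mock()
# The code under test would write the pretty-printed JSON to this mock file object.
mock_out.write(json.dumps(expected, indent=4))

# The tests then assert that write() received exactly the indented serialization.
mock_out.write.assert_called_with(json.dumps(expected, indent=4))
mock_out.write.assert_has_calls([call(json.dumps(expected, indent=4))])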