Add SAN and coverage builds in Azure Pipelines (#253)

* Test: Add a coverage build to VSTS, on Microsoft-hosted pool

* Remove .circleci

* Restore main CI yml

* Split publish/download build into separate templates

* Add a no-sgx VSTS CI config

* Fix yml typo

* Remove CircleCI badge

* Early venv creation is part of publish_build

* Add debugging steps to Coverage build

* Roll static_checks into Coverage build

* Do our debugging ON FAILURE

* Restore missing label

* Add docker options, dedicated SigIll hunting test

* Explicitly report missing RDSEED support

* Remove the same typo, again

* Add CMake option for software entropy, enable by default

* yaml fixes

* Re-enable SAN build

* Duh

* Print sanity check of entropy implementation

* cat /etc/hosts

* Print CMake config

* Old school

* Publish all ports

* Run all coverage and SAN tests

* Remove prints

* Use gdb to print backtrace on SIG ILL

* Stop building Debug

* Quit when gdb is finished

* SOFTWARE_ENTROPY off by default

* cpuinfo on the SIG ILL agent

* Just run keyexchange directly

* TEMP: Disable other jobs

* Re-enable main jobs, cpuinfo in build

* Push coverage

* Remove SOFTWARE_ENTROPY option

* Push from correct workingDirectory

* Revert "Remove CircleCI badge"

This reverts commit 5202fe7c95.

* Revert "Remove .circleci"

This reverts commit 413883d9e7.

* Add comment for --publish-all option

* Print CMake config after building

* Use yaml anchor to avoid duplicating exclude paths

* Remove anchors - unsupported by VSTS

* Move comment to first use of --publish-all

* get_data() is used as MbedtlsEntropy, so return an MbedtlsEntropy

* Revert "get_data() is used as MbedtlsEntropy, so return an MbedtlsEntropy"

This reverts commit 03bc3d0750.

* Print actual error messages

* Update entropy.h

* inline

* Remove debug hack
This commit is contained in:
Eddy Ashton 2019-07-17 17:57:57 +01:00 committed by GitHub
Parent 7f2178036c
Commit 1c56672439
No known key found for this signature
GPG key ID: 4AEE18F83AFDEB23
8 changed files with 185 additions and 46 deletions

112
.vsts-ci-no-sgx.yml Normal file
View file

@@ -0,0 +1,112 @@
trigger:
batch: true
branches:
include:
- "ci/*"
paths:
exclude:
- 'README.md'
- 'CCF-TECHNICAL-REPORT.pdf'
- 'Dockerfile'
- 'Doxyfile'
- 'THIRD_PARTY_NOTICES.txt'
- 'getting_started/'
- 'sphinx/'
- '.circleci/'
- '.github/'
- '.vsts-gh-pages.yml'
- 'LICENSE'
pr:
autoCancel: true
branches:
include:
- master
paths:
include:
- '*'
exclude:
- 'README.md'
- 'CCF-TECHNICAL-REPORT.pdf'
- 'Dockerfile'
- 'Doxyfile'
- 'THIRD_PARTY_NOTICES.txt'
- 'getting_started/'
- 'sphinx/'
- '.circleci/'
- '.github/'
- '.vsts-gh-pages.yml'
- 'LICENSE'
schedules:
- cron: "0 3 * * Mon-Fri"
displayName: Daily morning build
branches:
include:
- master
always: true
jobs:
- job: Coverage
pool:
vmImage: 'ubuntu-16.04'
container:
image: ccfciteam/ccf-ci-18.04:latest
# Docker image is created with constrained network, unable to communicate
# with other nodes in end-to-end tests. Publishing the ports allows the
# nodes to communicate.
options: --publish-all
steps:
- checkout: self
clean: true
submodules: true
# Container initialization is expensive, so don't start separate container
# for static_checks. Piggy back them here
- script: find . -regex ".*\.sh$" | xargs shellcheck -s bash -e SC2044,SC2002,SC1091
displayName: 'Shell Check'
- script: python3.7 notice-check.py
displayName: 'Check copyright notices'
- script: ./check-format.sh src
displayName: 'Check C++ code format'
- script: |
python3.7 -m venv env
source env/bin/activate
pip install black
black --check sphinx/ tests/ notice-check.py
displayName: 'Check Python code format'
# Actual coverage build steps starts here
- template: .vsts-ci-templates/build.yml
parameters:
cmake_args: '-DTARGET=virtual -DBUILD_SMALLBANK=OFF -DCOVERAGE=ON'
- template: .vsts-ci-templates/test.yml
parameters:
suite_name_suffix: 'coverage'
- script: ../tests/coverage/generate_coverage.sh
displayName: 'Push coverage'
workingDirectory: build
- job: SAN
pool:
vmImage: 'ubuntu-16.04'
container:
image: ccfciteam/ccf-ci-18.04:latest
options: --publish-all
steps:
- checkout: self
clean: true
submodules: true
- template: .vsts-ci-templates/build.yml
parameters:
cmake_args: '-DTARGET=virtual -DBUILD_SMALLBANK=OFF -DSAN=ON'
- template: .vsts-ci-templates/test.yml
parameters:
suite_name_suffix: 'san'

View file

@@ -1,23 +1,17 @@
parameters:
cmake_args: ''
artifact_name: ''
steps:
- script: |
cat /proc/cpuinfo
displayName: /proc/cpuinfo
condition: succeededOrFailed()
- task: CMake@1
displayName: CMake
inputs:
cmakeArgs: '-GNinja -DTARGET=sgx .. ${{ parameters.cmake_args }}'
cmakeArgs: '-L -GNinja -DTARGET=sgx .. ${{ parameters.cmake_args }}'
- script: ninja
displayName: Ninja
workingDirectory: build
- script: |
tar -cavf artifact.tar.gz cchost CTestTestfile.cmake json_schema genesisgenerator merkle_mem raft_driver tests.sh *_test *_bench *client *.so.signed
displayName: Compress build artifact
workingDirectory: build
- task: PublishPipelineArtifact@0
inputs:
artifactName: ${{ parameters.artifact_name }}
targetPath: build/artifact.tar.gz

View file

@@ -0,0 +1,18 @@
parameters:
artifact_name: ''
steps:
- task: DownloadPipelineArtifact@1
inputs:
artifactName: ${{ parameters.artifact_name }}
targetPath: $(Build.SourcesDirectory)/build
- script: |
tar -xaf artifact.tar.gz
displayName: Uncompress build artifact
workingDirectory: build
- task: CMake@1
displayName: Re-generate test metadata
inputs:
cmakeArgs: '..'

View file

@@ -0,0 +1,13 @@
parameters:
artifact_name: ''
steps:
- script: |
tar -cavf artifact.tar.gz cchost CTestTestfile.cmake json_schema genesisgenerator merkle_mem raft_driver tests.sh *_test *_bench *client *.so.signed
displayName: Compress build artifact
workingDirectory: build
- task: PublishPipelineArtifact@0
inputs:
artifactName: ${{ parameters.artifact_name }}
targetPath: build/artifact.tar.gz

View file

@@ -1,28 +1,8 @@
parameters:
suite_name_suffix: ''
ctest_filter: ''
artifact_name: ''
steps:
- script: |
rm -rf build
displayName: Remove old build directory
- task: DownloadPipelineArtifact@1
inputs:
artifactName: ${{ parameters.artifact_name }}
targetPath: $(Build.SourcesDirectory)/build
- script: |
tar -xaf artifact.tar.gz
displayName: Uncompress build artifact
workingDirectory: build
- task: CMake@1
displayName: Re-generate test metadata
inputs:
cmakeArgs: '..'
- script: |
./tests.sh -VV --timeout 240 --no-compress-output -T Test ${{ parameters.ctest_filter }}
displayName: CTest

View file

@@ -56,6 +56,8 @@ jobs:
- template: .vsts-ci-templates/build.yml
parameters:
cmake_args: '-DBUILD_SMALLBANK=OFF'
- template: .vsts-ci-templates/publish_build.yml
parameters:
artifact_name: build_results
- job: ACC_1804_SGX_quick_tests
@@ -66,11 +68,13 @@ jobs:
- checkout: self
clean: true
submodules: true
- template: .vsts-ci-templates/download_build.yml
parameters:
artifact_name: build_results
- template: .vsts-ci-templates/test.yml
parameters:
ctest_filter: '-LE "perf|end_to_end"'
suite_name_suffix: ' SGX quick tests'
artifact_name: build_results
- job: ACC_1804_SGX_e2e_tests_A
pool: Ubuntu-1804-SGX-Azure
@@ -80,11 +84,13 @@ jobs:
- checkout: self
clean: true
submodules: true
- template: .vsts-ci-templates/download_build.yml
parameters:
artifact_name: build_results
- template: .vsts-ci-templates/test.yml
parameters:
ctest_filter: '-L end_to_end -I 0,,2'
suite_name_suffix: ' SGX end to end tests A'
artifact_name: build_results
- job: ACC_1804_SGX_e2e_tests_B
pool: Ubuntu-1804-SGX-Azure
@@ -94,11 +100,13 @@ jobs:
- checkout: self
clean: true
submodules: true
- template: .vsts-ci-templates/download_build.yml
parameters:
artifact_name: build_results
- template: .vsts-ci-templates/test.yml
parameters:
ctest_filter: '-L end_to_end -I 1,,2'
suite_name_suffix: ' SGX end to end tests B'
artifact_name: build_results
- job: ACC_1804_SGX_perf_build
pool: Ubuntu-1804-SGX-Azure
@@ -109,6 +117,8 @@ jobs:
- template: .vsts-ci-templates/build.yml
parameters:
cmake_args: '-DBUILD_SMALLBANK=ON -DSERVICE_IDENTITY_CURVE_CHOICE=secp256k1_bitcoin'
- template: .vsts-ci-templates/publish_build.yml
parameters:
artifact_name: perf_build_results
- job: ACC_1804_SGX_perf_build_A
@@ -119,11 +129,13 @@ jobs:
- checkout: self
clean: true
submodules: true
- template: .vsts-ci-templates/download_build.yml
parameters:
artifact_name: perf_build_results
- template: .vsts-ci-templates/test.yml
parameters:
suite_name_suffix: ' SGX Performance A'
ctest_filter: '-L perf -I 0,,2'
artifact_name: perf_build_results
- template: .vsts-ci-templates/push_perf_data.yml
- job: ACC_1804_SGX_perf_build_B
@@ -134,9 +146,11 @@ jobs:
- checkout: self
clean: true
submodules: true
- template: .vsts-ci-templates/download_build.yml
parameters:
artifact_name: perf_build_results
- template: .vsts-ci-templates/test.yml
parameters:
suite_name_suffix: ' SGX Performance B'
ctest_filter: '-L perf -I 1,,2'
artifact_name: perf_build_results
- template: .vsts-ci-templates/push_perf_data.yml

View file

@@ -60,7 +60,7 @@ namespace tls
void* get_data() override
{
return &entropy;
return &drbg;
}
};

View file

@@ -39,6 +39,14 @@ namespace tls
using HashBytes = std::vector<uint8_t>;
inline std::string str_err(int err)
{
constexpr size_t len = 100;
char buf[len];
mbedtls_strerror(err, buf, len);
return std::string(buf);
}
// 2 implementations of secp256k1 are available - mbedtls and bitcoin. Either
// can be asked for explicitly via the CurveImpl enum. For cases where we
// receive a raw 256k1 key/signature/cert only, this flag determines which
@@ -173,7 +181,7 @@ namespace tls
if (rc != 0)
{
throw std::logic_error(
"mbedtls_ecp_point_write_binary failed: " + std::to_string(rc));
"mbedtls_ecp_point_write_binary failed: " + str_err(rc));
}
rc = secp256k1_ec_pubkey_parse(bc_ctx, bc_pub, pub_buf, pub_len);
@@ -236,7 +244,7 @@ namespace tls
if (rc != 0)
{
throw std::logic_error(
"Could not set up EdDSA context: " + std::to_string(rc));
"Could not set up EdDSA context: " + str_err(rc));
}
rc = mbedtls_eddsa_genkey(
@@ -247,7 +255,7 @@ namespace tls
if (rc != 0)
{
throw std::logic_error(
"Could not generate EdDSA keypair: " + std::to_string(rc));
"Could not generate EdDSA keypair: " + str_err(rc));
}
break;
}
@@ -259,7 +267,7 @@ namespace tls
if (rc != 0)
{
throw std::logic_error(
"Could not set up ECDSA context: " + std::to_string(rc));
"Could not set up ECDSA context: " + str_err(rc));
}
rc = mbedtls_ecp_gen_key(
@@ -267,7 +275,7 @@ namespace tls
if (rc != 0)
{
throw std::logic_error(
"Could not generate ECDSA keypair: " + std::to_string(rc));
"Could not generate ECDSA keypair: " + str_err(rc));
}
break;
}
@@ -278,7 +286,7 @@ namespace tls
{
throw std::logic_error(
"Created key and received unexpected type: " +
std::to_string(actual_ec) + " != " + std::to_string(ec));
std::to_string(actual_ec) + " != " + str_err(ec));
}
}
@@ -544,7 +552,7 @@ namespace tls
if (rc != 0)
{
throw std::logic_error(
"Could not extract raw private key: " + std::to_string(rc));
"Could not extract raw private key: " + str_err(rc));
}
if (secp256k1_ec_seckey_verify(bc_ctx, c4_priv) != 1)
@@ -620,7 +628,7 @@ namespace tls
int rc = mbedtls_pk_parse_key(key.get(), pemPk.p, pemPk.n, pw.p, pw.n);
if (rc != 0)
{
throw std::logic_error("Could not parse key: " + std::to_string(rc));
throw std::logic_error("Could not parse key: " + str_err(rc));
}
const auto curve = get_ec_from_context(*key);