Build tools: build configurations, vcpkg support, vcpkg port file for OpenTelemetry, use submodules for CMake deps (#377)

This commit is contained in:
Max Golovanov 2020-11-30 14:47:41 -08:00 коммит произвёл GitHub
Родитель 28efe53f8a
Коммит 2a516addb6
Не найден ключ, соответствующий данной подписи
Идентификатор ключа GPG: 4AEE18F83AFDEB23
89 изменённых файлов: 1480 добавлений и 1089 удалений

3
.bazelignore Normal file
Просмотреть файл

@ -0,0 +1,3 @@
third_party
tools
out

6
.gitattributes поставляемый
Просмотреть файл

@ -37,6 +37,6 @@ LICENSE* text
*.pdf binary
*.rtf binary
## Self-reference =)
.gitignore text
.gitattributes text
## git files
.gitignore text eol=lf
.gitattributes text eol=lf

32
.github/workflows/ci.yml поставляемый
Просмотреть файл

@ -12,6 +12,8 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
submodules: 'recursive'
- name: setup
run: |
sudo ./ci/setup_cmake.sh
@ -26,6 +28,8 @@ jobs:
runs-on: ubuntu-18.04
steps:
- uses: actions/checkout@v2
with:
submodules: 'recursive'
- name: setup
run: |
sudo ./ci/setup_ci_environment.sh
@ -44,6 +48,8 @@ jobs:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v2
with:
submodules: 'recursive'
- name: setup
run: |
sudo ./ci/setup_ci_environment.sh
@ -56,6 +62,8 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
submodules: 'recursive'
- name: setup
run: |
sudo ./ci/setup_cmake.sh
@ -68,6 +76,8 @@ jobs:
runs-on: ubuntu-18.04
steps:
- uses: actions/checkout@v2
with:
submodules: 'recursive'
- name: setup
run: |
sudo ./ci/setup_ci_environment.sh
@ -83,6 +93,8 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
submodules: 'recursive'
- name: setup
run: |
sudo ./ci/setup_cmake.sh
@ -96,6 +108,8 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
submodules: 'recursive'
- name: setup
run: |
sudo ./ci/setup_cmake.sh
@ -109,6 +123,8 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
submodules: 'recursive'
- name: setup
run: |
sudo ./ci/setup_cmake.sh
@ -122,6 +138,8 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
submodules: 'recursive'
- name: setup
run: |
sudo ./ci/setup_cmake.sh
@ -135,6 +153,8 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
submodules: 'recursive'
- name: setup
run: |
sudo ./ci/setup_cmake.sh
@ -148,6 +168,8 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
submodules: 'recursive'
- name: setup
run: |
sudo ./ci/setup_cmake.sh
@ -178,6 +200,8 @@ jobs:
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
with:
submodules: 'recursive'
- name: run tests
run: ./ci/do_ci.sh bazel.test
@ -186,6 +210,8 @@ jobs:
runs-on: windows-2019
steps:
- uses: actions/checkout@v2
with:
submodules: 'recursive'
- name: setup
run: |
./ci/setup_windows_cmake.ps1
@ -201,6 +227,8 @@ jobs:
runs-on: windows-2019
steps:
- uses: actions/checkout@v2
with:
submodules: 'recursive'
- name: setup
run: ./ci/install_windows_bazelisk.ps1
- name: run tests
@ -211,6 +239,8 @@ jobs:
runs-on: windows-2019
steps:
- uses: actions/checkout@v2
with:
submodules: 'recursive'
- name: setup
run: |
./ci/setup_windows_cmake.ps1
@ -223,6 +253,8 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
submodules: 'recursive'
- name: setup
run: |
sudo ./ci/setup_cmake.sh

6
.gitignore поставляемый
Просмотреть файл

@ -37,3 +37,9 @@
# Mac
.DS_Store
# Output directories
/out
/out.*
# Indicator that the tools were deployed
.buildtools

28
.gitmodules поставляемый
Просмотреть файл

@ -1,3 +1,29 @@
[submodule "third_party/prometheus-cpp"]
path = third_party/prometheus-cpp
url = https://github.com/jupp0r/prometheus-cpp.git
url = https://github.com/jupp0r/prometheus-cpp
branch = master
[submodule "tools/vcpkg"]
path = tools/vcpkg
url = https://github.com/Microsoft/vcpkg
branch = master
[submodule "third_party/ms-gsl"]
path = third_party/ms-gsl
url = https://github.com/microsoft/GSL
branch = master
[submodule "third_party/googletest"]
path = third_party/googletest
url = https://github.com/google/googletest
branch = master
[submodule "third_party/benchmark"]
path = third_party/benchmark
url = https://github.com/google/benchmark
branch = master
[submodule "third_party/opentelemetry-proto"]
path = third_party/opentelemetry-proto
url = https://github.com/open-telemetry/opentelemetry-proto
branch = master

Просмотреть файл

@ -15,20 +15,23 @@ option(WITH_OTLP "Whether to include the OpenTelemetry Protocol in the SDK" OFF)
option(WITH_PROMETHEUS "Whether to include the Prometheus Client in the SDK"
OFF)
option(WITH_TESTS "Whether to enable tests" ON)
option(BUILD_TESTING "Whether to enable tests" ON)
option(WITH_EXAMPLES "Whether to build examples" ON)
set(WITH_PROTOBUF OFF)
if(WITH_OTLP)
set(WITH_PROTOBUF ON)
endif()
if(WITH_TESTS)
include(CTest)
endif()
find_package(Threads)
function(install_windows_deps)
# Bootstrap vcpkg from CMake and auto-install deps in case we are missing
# deps on Windows
message("Installing build tools and dependencies...")
execute_process(
COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/tools/setup-buildtools.cmd)
set(CMAKE_TOOLCHAIN_FILE
${CMAKE_CURRENT_SOURCE_DIR}/tools/vcpkg/scripts/buildsystems/vcpkg.cmake)
endfunction()
if(MSVC)
# Options for Visual C++ compiler: /Zc:__cplusplus - report an updated value
# for recent C++ language standards. Without this option MSVC returns the
@ -36,23 +39,67 @@ if(MSVC)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /Zc:__cplusplus")
endif()
if(WITH_OTLP)
set(WITH_PROTOBUF ON)
endif()
if(WITH_PROTOBUF)
set(protobuf_MODULE_COMPATIBLE ON)
find_package(Protobuf CONFIG NAMES protobuf)
# Older versions of protobuf don't use cmake config files.
find_package(Protobuf REQUIRED)
if(NOT protobuf_FOUND)
if(WIN32)
install_windows_deps()
endif()
find_package(Protobuf REQUIRED)
if(WIN32)
# Always use x64 protoc.exe
if(NOT EXISTS "${Protobuf_PROTOC_EXECUTABLE}")
set(Protobuf_PROTOC_EXECUTABLE
${CMAKE_CURRENT_SOURCE_DIR}/tools/vcpkg/packages/protobuf_x64-windows/tools/protobuf/protoc.exe
)
endif()
endif()
# Latest Protobuf uses mixed case instead of uppercase
set(PROTOBUF_PROTOC_EXECUTABLE ${Protobuf_PROTOC_EXECUTABLE})
endif()
message("PROTOBUF_PROTOC_EXECUTABLE=${PROTOBUF_PROTOC_EXECUTABLE}")
endif()
if(WITH_OTLP)
include(third_party/opentelemetry-proto/Protobuf.cmake)
include(cmake/opentelemetry-proto.cmake)
endif()
list(APPEND CMAKE_PREFIX_PATH "${CMAKE_BINARY_DIR}")
if(BUILD_TESTING)
find_package(GTest REQUIRED)
find_package(benchmark REQUIRED)
include(CTest)
if(EXISTS ${CMAKE_BINARY_DIR}/lib/libgtest.a)
# Prefer GTest from build tree. GTest is not always working with
# CMAKE_PREFIX_PATH
set(GTEST_INCLUDE_DIRS
${CMAKE_CURRENT_SOURCE_DIR}/third_party/googletest/googletest/include
${CMAKE_CURRENT_SOURCE_DIR}/third_party/googletest/googlemock/include)
set(GTEST_BOTH_LIBRARIES
${CMAKE_BINARY_DIR}/lib/libgtest.a
${CMAKE_BINARY_DIR}/lib/libgtest_main.a
${CMAKE_BINARY_DIR}/lib/libgmock.a)
elseif(WIN32)
# Make sure we are always bootstrapped with vcpkg on Windows
find_package(GTest)
if(NOT GTEST_FOUND)
install_windows_deps()
find_package(GTest REQUIRED)
endif()
else()
# Prefer GTest installed by OS distro, brew or vcpkg package manager
find_package(GTest REQUIRED)
endif()
include_directories(SYSTEM ${GTEST_INCLUDE_DIRS})
message("GTEST_INCLUDE_DIRS = ${GTEST_INCLUDE_DIRS}")
message("GTEST_BOTH_LIBRARIES = ${GTEST_BOTH_LIBRARIES}")
enable_testing()
# Benchmark respects the CMAKE_PREFIX_PATH
find_package(benchmark CONFIG REQUIRED)
endif()
include_directories(api/include)

51
CMakeSettings.json Normal file
Просмотреть файл

@ -0,0 +1,51 @@
{
"configurations": [
{
"name": "nostd-x64-Debug",
"generator": "Ninja",
"configurationType": "Debug",
"inheritEnvironments": [ "msvc_x64_x64" ],
"buildRoot": "${projectDir}\\out\\vs2019\\${name}",
"installRoot": "${projectDir}\\out\\vs2019\\${name}\\install",
"cmakeCommandArgs": "",
"buildCommandArgs": "",
"ctestCommandArgs": "",
"variables": [
{
"name": "WITH_OTLP",
"value": "True",
"type": "BOOL"
},
{
"name": "WITH_EXAMPLES",
"value": "true",
"type": "BOOL"
}
]
},
{
"name": "nostd-x64-Release",
"generator": "Ninja",
"configurationType": "RelWithDebInfo",
"inheritEnvironments": [ "msvc_x64_x64" ],
"buildRoot": "${projectDir}\\out\\vs2019\\${name}",
"installRoot": "${projectDir}\\out\\vs2019\\${name}\\install",
"cmakeCommandArgs": "",
"buildCommandArgs": "",
"ctestCommandArgs": "",
"cmakeToolchain": "",
"variables": [
{
"name": "WITH_OTLP",
"value": "True",
"type": "BOOL"
},
{
"name": "WITH_EXAMPLES",
"value": "true",
"type": "BOOL"
}
]
}
]
}

Просмотреть файл

@ -82,7 +82,7 @@ http_archive(
http_archive(
name = "github_nlohmann_json",
build_file = "//third_party/json:nlohmann_json.BUILD",
build_file = "//bazel:nlohmann_json.BUILD",
sha256 = "69cc88207ce91347ea530b227ff0776db82dcb8de6704e1a3d74f4841bc651cf",
urls = [
"https://github.com/nlohmann/json/releases/download/v3.6.1/include.zip",
@ -106,7 +106,7 @@ prometheus_cpp_repositories()
# libcurl - An optional dependency we pull in for tests.
http_archive(
name = "curl",
build_file = "@//third_party:curl.BUILD",
build_file = "@//bazel:curl.BUILD",
sha256 = "ba98332752257b47b9dea6d8c0ad25ec1745c20424f1dd3ff2c99ab59e97cf91",
strip_prefix = "curl-7.73.0",
urls = ["https://curl.haxx.se/download/curl-7.73.0.tar.gz"],

Просмотреть файл

@ -4,5 +4,8 @@ foreach(testname context_test runtime_context_test)
add_executable(${testname} "${testname}.cc")
target_link_libraries(${testname} ${GTEST_BOTH_LIBRARIES}
${CMAKE_THREAD_LIBS_INIT} opentelemetry_api)
gtest_add_tests(TARGET ${testname} TEST_PREFIX context. TEST_LIST ${testname})
gtest_add_tests(
TARGET ${testname}
TEST_PREFIX context.
TEST_LIST ${testname})
endforeach()

Просмотреть файл

@ -3,5 +3,7 @@ include(GoogleTest)
add_executable(timestamp_test timestamp_test.cc)
target_link_libraries(timestamp_test ${GTEST_BOTH_LIBRARIES}
${CMAKE_THREAD_LIBS_INIT} opentelemetry_api)
gtest_add_tests(TARGET timestamp_test TEST_PREFIX trace. TEST_LIST
timestamp_test)
gtest_add_tests(
TARGET timestamp_test
TEST_PREFIX trace.
TEST_LIST timestamp_test)

Просмотреть файл

@ -2,5 +2,8 @@ foreach(testname logger_provider_test logger_test)
add_executable(${testname} "${testname}.cc")
target_link_libraries(${testname} ${GTEST_BOTH_LIBRARIES}
${CMAKE_THREAD_LIBS_INIT} opentelemetry_api)
gtest_add_tests(TARGET ${testname} TEST_PREFIX logs. TEST_LIST ${testname})
gtest_add_tests(
TARGET ${testname}
TEST_PREFIX logs.
TEST_LIST ${testname})
endforeach()

Просмотреть файл

@ -2,5 +2,8 @@ foreach(testname noop_instrument_test meter_provider_test noop_metrics_test)
add_executable(${testname} "${testname}.cc")
target_link_libraries(${testname} ${GTEST_BOTH_LIBRARIES}
${CMAKE_THREAD_LIBS_INIT} opentelemetry_api)
gtest_add_tests(TARGET ${testname} TEST_PREFIX metrics. TEST_LIST ${testname})
gtest_add_tests(
TARGET ${testname}
TEST_PREFIX metrics.
TEST_LIST ${testname})
endforeach()

Просмотреть файл

@ -5,5 +5,8 @@ foreach(testname function_ref_test string_view_test unique_ptr_test
add_executable(${testname} "${testname}.cc")
target_link_libraries(${testname} ${GTEST_BOTH_LIBRARIES}
${CMAKE_THREAD_LIBS_INIT} opentelemetry_api)
gtest_add_tests(TARGET ${testname} TEST_PREFIX nostd. TEST_LIST ${testname})
gtest_add_tests(
TARGET ${testname}
TEST_PREFIX nostd.
TEST_LIST ${testname})
endforeach()

Просмотреть файл

@ -4,5 +4,7 @@ add_executable(dynamic_load_test dynamic_load_test.cc)
target_link_libraries(dynamic_load_test ${GTEST_BOTH_LIBRARIES}
${CMAKE_THREAD_LIBS_INIT} opentelemetry_api)
target_link_libraries(dynamic_load_test ${CMAKE_DL_LIBS})
gtest_add_tests(TARGET dynamic_load_test TEST_PREFIX plugin. TEST_LIST
dynamic_load_test)
gtest_add_tests(
TARGET dynamic_load_test
TEST_PREFIX plugin.
TEST_LIST dynamic_load_test)

Просмотреть файл

@ -12,8 +12,10 @@ foreach(
add_executable(api_${testname} "${testname}.cc")
target_link_libraries(api_${testname} ${GTEST_BOTH_LIBRARIES}
${CMAKE_THREAD_LIBS_INIT} opentelemetry_api)
gtest_add_tests(TARGET api_${testname} TEST_PREFIX trace. TEST_LIST
api_${testname})
gtest_add_tests(
TARGET api_${testname}
TEST_PREFIX trace.
TEST_LIST api_${testname})
endforeach()
add_executable(span_id_benchmark span_id_benchmark.cc)

Просмотреть файл

@ -2,5 +2,8 @@ foreach(testname http_text_format_test)
add_executable(${testname} "${testname}.cc")
target_link_libraries(${testname} ${GTEST_BOTH_LIBRARIES}
${CMAKE_THREAD_LIBS_INIT} opentelemetry_api)
gtest_add_tests(TARGET ${testname} TEST_PREFIX trace. TEST_LIST ${testname})
gtest_add_tests(
TARGET ${testname}
TEST_PREFIX trace.
TEST_LIST ${testname})
endforeach()

Просмотреть файл

Просмотреть файл

Просмотреть файл

@ -56,14 +56,11 @@ elif [[ "$1" == "cmake.exporter.prometheus.test" ]]; then
# apt-get install sudo
# apt-get install zlib1g-dev
# apt-get -y install libcurl4-openssl-dev
cd third_party
git clone https://github.com/jupp0r/prometheus-cpp
cd prometheus-cpp
git checkout v0.9.0
git submodule init
git submodule update
cd third_party/prometheus-cpp
git submodule update --recursive --init
[[ -d _build ]] && rm -rf ./_build
mkdir _build && cd _build
cmake .. -DBUILD_SHARED_LIBS=ON
cmake .. -DBUILD_SHARED_LIBS=ON -DUSE_THIRDPARTY_LIBRARIES=ON
make -j 4
sudo make install

Просмотреть файл

@ -3,6 +3,6 @@
set -e
apt install -y clang-format-8 python3-pip git curl
pip3 install cmake_format==0.6.5
pip3 install cmake_format==0.6.13
curl -L -o /usr/local/bin/buildifier https://github.com/bazelbuild/buildtools/releases/download/2.2.1/buildifier
chmod +x /usr/local/bin/buildifier

Просмотреть файл

Просмотреть файл

@ -37,7 +37,7 @@ add_custom_command(
${TRACE_PROTO}
)
include_directories(SYSTEM "${CMAKE_BINARY_DIR}/generated/third_party/opentelemetry-proto")
include_directories("${GENERATED_PROTOBUF_PATH}")
add_library(opentelemetry_proto OBJECT
${COMMON_PB_CPP_FILE}

1
docker/.gitignore поставляемый Normal file
Просмотреть файл

@ -0,0 +1 @@
**/setup-*.sh

Просмотреть файл

@ -0,0 +1,42 @@
FROM ubuntu:14.04
ENV DEBIAN_FRONTEND noninteractive
## Update cache and upgrade image
RUN apt-get -y update && apt-get -y upgrade && apt-get -y dist-upgrade
## Build environment packages
RUN apt-get install -qq -y --ignore-missing \
apt-utils \
automake \
bc \
build-essential \
bzip2 \
cmake \
curl \
git \
libcurl4-openssl-dev \
libssl-dev \
make \
pkg-config \
python \
sudo \
tar \
zip \
unzip \
wget \
zlib1g-dev
RUN mkdir -p /usr/local/bin
## Install cmake since it's an expensive operation and best be done once
COPY ./setup-cmake.sh /usr/local/bin/setup-cmake.sh
RUN chmod +x /usr/local/bin/setup-cmake.sh
RUN /usr/local/bin/setup-cmake.sh
## Install protobuf3 since Ubuntu 14.04 does not have protobuf3
COPY ./setup-protobuf.sh /usr/local/bin/setup-protobuf.sh
RUN chmod +x /usr/local/bin/setup-protobuf.sh
RUN /usr/local/bin/setup-protobuf.sh
# ENTRYPOINT bash
CMD /bin/bash

Просмотреть файл

@ -0,0 +1,38 @@
FROM ubuntu:16.04
ENV DEBIAN_FRONTEND noninteractive
## Update cache and upgrade image
RUN apt-get -y update && apt-get -y upgrade && apt-get -y dist-upgrade
## Build environment packages
RUN apt-get install -qq -y --ignore-missing \
apt-utils \
automake \
build-essential \
bc \
bzip2 \
cmake \
curl \
git \
libcurl4-openssl-dev \
libsqlite3-dev \
libssl-dev \
libtool-bin \
make \
pkg-config \
python \
sudo \
tar \
zip \
unzip \
wget \
zlib1g-dev
## Install cmake since it's an expensive operation and best be done once
RUN mkdir -p /usr/local/bin
COPY ./setup-cmake.sh /usr/local/bin/setup-cmake.sh
RUN chmod +x /usr/local/bin/setup-cmake.sh
RUN /usr/local/bin/setup-cmake.sh
# ENTRYPOINT bash
CMD /bin/bash

Просмотреть файл

@ -0,0 +1,39 @@
FROM ubuntu:18.04
ENV DEBIAN_FRONTEND noninteractive
## Update cache and upgrade image
RUN apt-get -y update && apt-get -y upgrade && apt-get -y dist-upgrade
## Build environment packages
RUN apt-get install -qq -y --ignore-missing \
apt-utils \
automake \
bc \
build-essential \
bzip2 \
cmake \
curl \
git \
libcurl4-openssl-dev \
libssl-dev \
libtool-bin \
make \
pkg-config \
protobuf-compiler \
libprotobuf-dev \
python \
sudo \
tar \
zip \
unzip \
wget \
zlib1g-dev
## Install cmake since it's an expensive operation and best be done once
RUN mkdir -p /usr/local/bin
COPY ./setup-cmake.sh /usr/local/bin/setup-cmake.sh
RUN chmod +x /usr/local/bin/setup-cmake.sh
RUN /usr/local/bin/setup-cmake.sh
# ENTRYPOINT bash
CMD /bin/bash

Просмотреть файл

@ -0,0 +1,39 @@
FROM ubuntu:20.04
ENV DEBIAN_FRONTEND noninteractive
## Update cache and upgrade image
RUN apt-get -y update && apt-get -y upgrade && apt-get -y dist-upgrade
## Build environment packages
RUN apt-get install -qq -y --ignore-missing \
apt-utils \
automake \
bc \
build-essential \
bzip2 \
cmake \
curl \
git \
libcurl4-openssl-dev \
libssl-dev \
libtool-bin \
make \
pkg-config \
protobuf-compiler \
libprotobuf-dev \
python \
sudo \
tar \
zip \
unzip \
wget \
zlib1g-dev
## Install cmake since it's an expensive operation and best be done once
RUN mkdir -p /usr/local/bin
COPY ./setup-cmake.sh /usr/local/bin/setup-cmake.sh
RUN chmod +x /usr/local/bin/setup-cmake.sh
RUN /usr/local/bin/setup-cmake.sh
# ENTRYPOINT bash
CMD /bin/bash

Просмотреть файл

@ -0,0 +1,84 @@
# Building OpenTelemetry C++ SDK with vcpkg
vcpkg is a Microsoft cross-platform open source C++ package manager. Onboarding instructions for Windows, Linux and Mac OS X are [available here](https://docs.microsoft.com/en-us/cpp/build/vcpkg). This document assumes that the customer build system is already configured to use vcpkg. OpenTelemetry C++ SDK maintainers provide a build recipe, `opentelemetry` port or CONTROL file for vcpkg. The mainline vcpkg repo is refreshed to point to the latest stable open source release of the OpenTelemetry C++ SDK.
## Installing opentelemetry package
The following command can be used to install the public open source release:
```console
vcpkg install opentelemetry
```
That's it! The package should be compiled for the current OS.
See instructions below to build the SDK with additional Microsoft-proprietary modules.
## Testing custom dev OpenTelemetry build on Windows
`cmd.exe` command line prompt commands:
```console
git clone --recurse-submodules https://github.com/open-telemetry/opentelemetry-cpp
cd opentelemetry-cpp
vcpkg install --head --overlay-ports=%CD%\tools\ports opentelemetry
```
## Testing custom dev OpenTelemetry build on POSIX (Linux and Mac)
Shell commands:
```console
git clone --recurse-submodules https://github.com/open-telemetry/opentelemetry-cpp
cd opentelemetry-cpp
vcpkg install --head --overlay-ports=`pwd`/tools/ports opentelemetry
```
## Using response files to specify dependencies
vcpkg allows you to consolidate parameters passed to vcpkg in a response file. All 3rd party dependencies needed for the OpenTelemetry SDK can be described and installed via a response file.
Example for Mac:
```console
vcpkg install @tools/ports/opentelemetry/response_file_mac.txt
```
Example for Linux:
```console
vcpkg install @tools/ports/opentelemetry/response_file_linux.txt
```
vcpkg build log files are created in `${VCPKG_INSTALL_DIR}/buildtrees/opentelemetry/build-[err|out].log` . Review the logs in case you encounter package installation failures.
## Using triplets
In order to enable custom build flags - vcpkg triplets and custom environment variables may be used. Please see [triplets instruction here](https://vcpkg.readthedocs.io/en/latest/users/triplets/). Response file for a custom build, e.g. `response_file_linux_PRODUCTNAME.txt` may specify a custom triplet. For example, custom triplet controls if the library is built as static or dynamic. Default triplets may also be overridden with [custom triplets](https://vcpkg.readthedocs.io/en/latest/examples/overlay-triplets-linux-dynamic/#overlay-triplets-example). Custom triplets specific to various products must be maintained by product teams. Product teams may optionally decide to integrate their triplets in the mainline OpenTelemetry C++ SDK repo as-needed.
## Using Feature Packages
To install opentelemetry built with standard library API surface classes:
```console
vcpkg install opentelemetry[stdlib]
```
To install opentelemetry built with Abseil API surface classes:
```console
vcpkg install opentelemetry[abseil]
```
## Build with vcpkg dependencies
`CMakeLists.txt` in top-level directory lists the following package dependencies:
- `Protobuf` - required for OTLP exporter. Not required otherwise.
- `GTest` - required when building with tests enabled.
- `Benchmark` - required when building with tests enabled.
- `ms-gsl` - required for `gsl::span` when building with Standard Library with C++14 or C++17 compiler.
- `nlohmann-json` - required when building with zPages module.
- `prometheus-cpp` - required for Prometheus exporter.
It is possible to adjust the build system to use either vcpkg-installed dependencies or OS-provided dependencies, e.g. `brew` or `deb` packages.

Просмотреть файл

@ -17,8 +17,12 @@ if(BUILD_TESTING)
in_memory_span_exporter_test ${GTEST_BOTH_LIBRARIES}
${CMAKE_THREAD_LIBS_INIT} opentelemetry_exporter_in_memory)
gtest_add_tests(TARGET in_memory_span_data_test TEST_PREFIX exporter.
TEST_LIST in_memory_span_data_test)
gtest_add_tests(TARGET in_memory_span_exporter_test TEST_PREFIX exporter.
TEST_LIST in_memory_span_exporter_test)
gtest_add_tests(
TARGET in_memory_span_data_test
TEST_PREFIX exporter.
TEST_LIST in_memory_span_data_test)
gtest_add_tests(
TARGET in_memory_span_exporter_test
TEST_PREFIX exporter.
TEST_LIST in_memory_span_exporter_test)
endif()

Просмотреть файл

@ -15,8 +15,12 @@ if(BUILD_TESTING)
ostream_metrics_test ${GTEST_BOTH_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT}
opentelemetry_exporter_ostream_metrics)
gtest_add_tests(TARGET ostream_metrics_test TEST_PREFIX exporter. TEST_LIST
ostream_metrics_test)
gtest_add_tests(TARGET ostream_span_test TEST_PREFIX exporter. TEST_LIST
ostream_span_test)
gtest_add_tests(
TARGET ostream_metrics_test
TEST_PREFIX exporter.
TEST_LIST ostream_metrics_test)
gtest_add_tests(
TARGET ostream_span_test
TEST_PREFIX exporter.
TEST_LIST ostream_span_test)
endif() # BUILD_TESTING

Просмотреть файл

@ -1,14 +1,15 @@
include_directories(include)
add_library(opentelemetry_exporter_otprotocol src/recordable.cc)
target_link_libraries(opentelemetry_exporter_otprotocol
$<TARGET_OBJECTS:opentelemetry_proto>)
target_link_libraries(opentelemetry_exporter_otprotocol opentelemetry_proto)
if(BUILD_TESTING)
add_executable(recordable_test test/recordable_test.cc)
target_link_libraries(
recordable_test ${GTEST_BOTH_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT}
opentelemetry_exporter_otprotocol protobuf::libprotobuf)
gtest_add_tests(TARGET recordable_test TEST_PREFIX exporter. TEST_LIST
recordable_test)
gtest_add_tests(
TARGET recordable_test
TEST_PREFIX exporter.
TEST_LIST recordable_test)
endif() # BUILD_TESTING

Просмотреть файл

@ -3,6 +3,8 @@ foreach(testname prometheus_collector_test prometheus_exporter_utils_test)
target_link_libraries(
${testname} ${GTEST_BOTH_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT}
prometheus_exporter prometheus-cpp::pull)
gtest_add_tests(TARGET ${testname} TEST_PREFIX exporter. TEST_LIST
${testname})
gtest_add_tests(
TARGET ${testname}
TEST_PREFIX exporter.
TEST_LIST ${testname})
endforeach()

Просмотреть файл

@ -6,6 +6,8 @@ if(CURL_FOUND)
include_directories(${CURL_INCLUDE_DIR})
target_link_libraries(${FILENAME} ${GTEST_BOTH_LIBRARIES}
${CMAKE_THREAD_LIBS_INIT} ${CURL_LIBRARIES})
gtest_add_tests(TARGET ${FILENAME} TEST_PREFIX ext.http.curl. TEST_LIST
${FILENAME})
gtest_add_tests(
TARGET ${FILENAME}
TEST_PREFIX ext.http.curl.
TEST_LIST ${FILENAME})
endif()

Просмотреть файл

@ -4,5 +4,8 @@ foreach(testname tracez_processor_test tracez_data_aggregator_test
target_link_libraries(${testname} ${GTEST_BOTH_LIBRARIES}
${CMAKE_THREAD_LIBS_INIT} opentelemetry_zpages)
gtest_add_tests(TARGET ${testname} TEST_PREFIX ext. TEST_LIST ${testname})
gtest_add_tests(
TARGET ${testname}
TEST_PREFIX ext.
TEST_LIST ${testname})
endforeach()

Просмотреть файл

@ -5,7 +5,10 @@ foreach(testname
target_link_libraries(
${testname} ${GTEST_BOTH_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT}
opentelemetry_common opentelemetry_trace)
gtest_add_tests(TARGET ${testname} TEST_PREFIX trace. TEST_LIST ${testname})
gtest_add_tests(
TARGET ${testname}
TEST_PREFIX trace.
TEST_LIST ${testname})
endforeach()
add_executable(random_fork_test random_fork_test.cc)

Просмотреть файл

@ -2,5 +2,8 @@ foreach(testname logger_provider_sdk_test logger_sdk_test)
add_executable(${testname} "${testname}.cc")
target_link_libraries(${testname} ${GTEST_BOTH_LIBRARIES}
${CMAKE_THREAD_LIBS_INIT} opentelemetry_logs)
gtest_add_tests(TARGET ${testname} TEST_PREFIX logs. TEST_LIST ${testname})
gtest_add_tests(
TARGET ${testname}
TEST_PREFIX logs.
TEST_LIST ${testname})
endforeach()

Просмотреть файл

@ -13,5 +13,8 @@ foreach(
add_executable(${testname} "${testname}.cc")
target_link_libraries(${testname} ${GTEST_BOTH_LIBRARIES}
${CMAKE_THREAD_LIBS_INIT} opentelemetry_metrics)
gtest_add_tests(TARGET ${testname} TEST_PREFIX metrics. TEST_LIST ${testname})
gtest_add_tests(
TARGET ${testname}
TEST_PREFIX metrics.
TEST_LIST ${testname})
endforeach()

Просмотреть файл

@ -14,7 +14,10 @@ foreach(
target_link_libraries(
${testname} ${GTEST_BOTH_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT}
opentelemetry_common opentelemetry_trace opentelemetry_exporter_in_memory)
gtest_add_tests(TARGET ${testname} TEST_PREFIX trace. TEST_LIST ${testname})
gtest_add_tests(
TARGET ${testname}
TEST_PREFIX trace.
TEST_LIST ${testname})
endforeach()
add_executable(sampler_benchmark sampler_benchmark.cc)

1
third_party/benchmark поставляемый Submodule

@ -0,0 +1 @@
Subproject commit 348aa2c964494b5947c0e7f96b82c1fe844d684f

1
third_party/googletest поставляемый Submodule

@ -0,0 +1 @@
Subproject commit a6dfd3aca7f2f91f95fc7ab650c95a48420d513d

1
third_party/json/BUILD поставляемый
Просмотреть файл

@ -1 +0,0 @@
exports_files(["LICENSE"])

1
third_party/ms-gsl поставляемый Submodule

@ -0,0 +1 @@
Subproject commit 6f4529395c5b7c2d661812257cd6780c67e54afa

1
third_party/opentelemetry-proto поставляемый Submodule

@ -0,0 +1 @@
Subproject commit f11e0538fd7dc30127ca6bfb2062e5d9f782b77b

2
third_party/opentelemetry-proto/README поставляемый
Просмотреть файл

@ -1,2 +0,0 @@
From: https://github.com/open-telemetry/opentelemetry-proto
Commit: e43e1abc40428a6ee98e3bfd79bec1dfa2ed18cd

Просмотреть файл

@ -1,9 +0,0 @@
# OpenTelemetry Collector Proto
This package describes the OpenTelemetry collector protocol.
## Packages
1. `common` package contains the common messages shared between different services.
2. `trace` package contains the Trace Service protos.
3. `metrics` package contains the Metrics Service protos.

Просмотреть файл

@ -1,45 +0,0 @@
// Copyright 2019, OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto3";
package opentelemetry.proto.collector.metrics.v1;
import "opentelemetry/proto/metrics/v1/metrics.proto";
option java_multiple_files = true;
option java_package = "io.opentelemetry.proto.collector.metrics.v1";
option java_outer_classname = "MetricsServiceProto";
option go_package = "github.com/open-telemetry/opentelemetry-proto/gen/go/collector/metrics/v1";
// Service that can be used to push metrics between one Application
// instrumented with OpenTelemetry and a collector, or between a collector and a
// central collector.
service MetricsService {
// For performance reasons, it is recommended to keep this RPC
// alive for the entire life of the application.
rpc Export(ExportMetricsServiceRequest) returns (ExportMetricsServiceResponse) {}
}
message ExportMetricsServiceRequest {
// An array of ResourceMetrics.
// For data coming from a single resource this array will typically contain one
// element. Intermediary nodes (such as OpenTelemetry Collector) that receive
// data from multiple origins typically batch the data before forwarding further and
// in that case this array will contain multiple elements.
repeated opentelemetry.proto.metrics.v1.ResourceMetrics resource_metrics = 1;
}
message ExportMetricsServiceResponse {
}

Просмотреть файл

@ -1,9 +0,0 @@
# This is an API configuration to generate an HTTP/JSON -> gRPC gateway for the
# OpenTelemetry service using github.com/grpc-ecosystem/grpc-gateway.
type: google.api.Service
config_version: 3
http:
rules:
- selector: opentelemetry.proto.collector.metrics.v1.MetricsService.Export
post: /v1/metrics
body: "*"

Просмотреть файл

@ -1,48 +0,0 @@
// Copyright 2019, OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto3";
// NOTE: This proto is experimental and is subject to change at this point.
// Please do not use it at the moment.
package opentelemetry.proto.collector.trace.v1;
import "opentelemetry/proto/trace/v1/trace.proto";
option java_multiple_files = true;
option java_package = "io.opentelemetry.proto.collector.trace.v1";
option java_outer_classname = "TraceServiceProto";
option go_package = "github.com/open-telemetry/opentelemetry-proto/gen/go/collector/trace/v1";
// Service that can be used to push spans between one Application instrumented with
// OpenTelemetry and an collector, or between an collector and a central collector (in this
// case spans are sent/received to/from multiple Applications).
service TraceService {
// For performance reasons, it is recommended to keep this RPC
// alive for the entire life of the application.
rpc Export(ExportTraceServiceRequest) returns (ExportTraceServiceResponse) {}
}
message ExportTraceServiceRequest {
// An array of ResourceSpans.
// For data coming from a single resource this array will typically contain one
// element. Intermediary nodes (such as OpenTelemetry Collector) that receive
// data from multiple origins typically batch the data before forwarding further and
// in that case this array will contain multiple elements.
repeated opentelemetry.proto.trace.v1.ResourceSpans resource_spans = 1;
}
message ExportTraceServiceResponse {
}

Просмотреть файл

@ -1,9 +0,0 @@
# This is an API configuration to generate an HTTP/JSON -> gRPC gateway for the
# OpenTelemetry service using github.com/grpc-ecosystem/grpc-gateway.
type: google.api.Service
config_version: 3
http:
rules:
- selector: opentelemetry.proto.collector.trace.v1.TraceService.Export
post: /v1/trace
body: "*"

Просмотреть файл

@ -1,77 +0,0 @@
// Copyright 2019, OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto3";
package opentelemetry.proto.common.v1;
option java_multiple_files = true;
option java_package = "io.opentelemetry.proto.common.v1";
option java_outer_classname = "CommonProto";
option go_package = "github.com/open-telemetry/opentelemetry-proto/gen/go/common/v1";
// AnyValue is used to represent any type of attribute value. AnyValue may contain a
// primitive value such as a string or integer or it may contain an arbitrary nested
// object containing arrays, key-value lists and primitives.
message AnyValue {
  // The value is one of the listed fields. It is valid for all values to be unspecified
  // in which case this AnyValue is considered to be "null".
  oneof value {
    string string_value = 1;
    bool bool_value = 2;
    int64 int_value = 3;
    double double_value = 4;
    ArrayValue array_value = 5;
    KeyValueList kvlist_value = 6;
  }
}
// ArrayValue is a list of AnyValue messages. We need ArrayValue as a message
// since oneof in AnyValue does not allow repeated fields.
message ArrayValue {
  // Array of values. The array may be empty (contain 0 elements).
  repeated AnyValue values = 1;
}
// KeyValueList is a list of KeyValue messages. We need KeyValueList as a message
// since `oneof` in AnyValue does not allow repeated fields. Everywhere else where we need
// a list of KeyValue messages (e.g. in Span) we use `repeated KeyValue` directly to
// avoid unnecessary extra wrapping (which slows down the protocol). The 2 approaches
// are semantically equivalent.
message KeyValueList {
  // A collection of key/value pairs. The list may be empty (may
  // contain 0 elements).
  repeated KeyValue values = 1;
}
// KeyValue is a key-value pair that is used to store Span attributes, Link
// attributes, etc.
message KeyValue {
  // The attribute key.
  string key = 1;
  // The attribute value; may hold any of the types supported by AnyValue.
  AnyValue value = 2;
}
// StringKeyValue is a pair of key/value strings. This is the simpler (and faster) version
// of KeyValue that only supports string values.
message StringKeyValue {
  // The key of the pair.
  string key = 1;
  // The string value of the pair.
  string value = 2;
}
// InstrumentationLibrary is a message representing the instrumentation library information
// such as the fully qualified name and version.
message InstrumentationLibrary {
  // Fully qualified name of the instrumentation library.
  string name = 1;
  // Version of the instrumentation library.
  string version = 2;
}

Просмотреть файл

@ -1,434 +0,0 @@
// Copyright 2019, OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto3";
package opentelemetry.proto.metrics.v1;
import "opentelemetry/proto/common/v1/common.proto";
import "opentelemetry/proto/resource/v1/resource.proto";
option java_multiple_files = true;
option java_package = "io.opentelemetry.proto.metrics.v1";
option java_outer_classname = "MetricsProto";
option go_package = "github.com/open-telemetry/opentelemetry-proto/gen/go/metrics/v1";
// A collection of InstrumentationLibraryMetrics from a Resource.
message ResourceMetrics {
  // The resource for the metrics in this message.
  // If this field is not set then no resource info is known.
  opentelemetry.proto.resource.v1.Resource resource = 1;

  // A list of metrics that originate from a resource.
  repeated InstrumentationLibraryMetrics instrumentation_library_metrics = 2;
}
// A collection of Metrics produced by an InstrumentationLibrary.
message InstrumentationLibraryMetrics {
  // The instrumentation library information for the metrics in this message.
  // If this field is not set then no library info is known.
  opentelemetry.proto.common.v1.InstrumentationLibrary instrumentation_library = 1;

  // A list of metrics that originate from an instrumentation library.
  repeated Metric metrics = 2;
}
// Defines a Metric which has one or more timeseries.
//
// The data model and relation between entities is shown in the diagram below.
//
// - Metric is composed of a MetricDescriptor and a list of data points.
// - MetricDescriptor contains a list of label keys (shown horizontally).
// - Data is a list of DataPoints (shown vertically).
// - DataPoint contains a list of label values and a value.
//
// Metric
// +----------+ +------------------------+
// |descriptor|-------->| MetricDescriptor |
// | | |+-----+-----+ +-----+ |
// | | ||label|label|...|label| |
// | data|--+ ||key1 |key2 | |keyN | |
// +----------+ | |+-----+-----+ +-----+ |
// | +------------------------+
// |
// | +---------------------------+
// | |DataPoint 1 |
// v |+------+------+ +------+ |
// +-----+ ||label |label |...|label | |
// | 1 |-->||value1|value2|...|valueN| |
// +-----+ |+------+------+ +------+ |
// | . | |+-----+ |
// | . | ||value| |
// | . | |+-----+ |
// | . | +---------------------------+
// | . | .
// | . | .
// | . | .
// | . | +---------------------------+
// | . | |DataPoint M |
// +-----+ |+------+------+ +------+ |
// | M |-->||label |label |...|label | |
// +-----+ ||value1|value2|...|valueN| |
// |+------+------+ +------+ |
// |+-----+ |
// ||value| |
// |+-----+ |
// +---------------------------+
//
//-----------------------------------------------------------------------
// DataPoint is a value of specific type corresponding to a given moment in
// time. Each DataPoint is timestamped.
//
// DataPoint is strongly typed: each DataPoint type has a specific Protobuf message
// depending on the value type of the metric and thus there are currently 4 DataPoint
// messages, which correspond to the types of metric values.
message Metric {
  // metric_descriptor describes the Metric.
  MetricDescriptor metric_descriptor = 1;

  // Data is a list of one or more DataPoints for a single metric. Only one of the
  // following fields is used for the data, depending on the type of the metric defined
  // by MetricDescriptor.type field.
  repeated Int64DataPoint int64_data_points = 2;
  repeated DoubleDataPoint double_data_points = 3;
  repeated HistogramDataPoint histogram_data_points = 4;
  repeated SummaryDataPoint summary_data_points = 5;
}
// Defines a metric type and its schema.
message MetricDescriptor {
  // name of the metric, including its DNS name prefix. It must be unique.
  string name = 1;

  // description of the metric, which can be used in documentation.
  string description = 2;

  // unit in which the metric value is reported. Follows the format
  // described by http://unitsofmeasure.org/ucum.html.
  string unit = 3;

  // Type is the type of values a metric has.
  enum Type {
    // INVALID_TYPE is the default Type, it MUST not be used.
    INVALID_TYPE = 0;

    // INT64 values are signed 64-bit integers.
    //
    // A Metric of this Type MUST store its values as Int64DataPoint.
    INT64 = 1;

    // MONOTONIC_INT64 values are monotonically increasing signed 64-bit
    // integers.
    //
    // A Metric of this Type MUST store its values as Int64DataPoint.
    MONOTONIC_INT64 = 2;

    // DOUBLE values are double-precision floating-point numbers.
    //
    // A Metric of this Type MUST store its values as DoubleDataPoint.
    DOUBLE = 3;

    // MONOTONIC_DOUBLE values are monotonically increasing double-precision
    // floating-point numbers.
    //
    // A Metric of this Type MUST store its values as DoubleDataPoint.
    MONOTONIC_DOUBLE = 4;

    // Histogram measurement.
    // Corresponding values are stored in HistogramDataPoint.
    HISTOGRAM = 5;

    // Summary value. Some frameworks implemented Histograms as a summary of observations
    // (usually things like request durations and response sizes). While it
    // also provides a total count of observations and a sum of all observed
    // values, it calculates configurable percentiles over a sliding time
    // window.
    // Corresponding values are stored in SummaryDataPoint.
    SUMMARY = 6;
  }

  // type is the type of values this metric has.
  Type type = 4;

  // Temporality is the temporal quality values of a metric have. It
  // describes how those values relate to the time interval over which they
  // are reported.
  enum Temporality {
    // INVALID_TEMPORALITY is the default Temporality, it MUST not be
    // used.
    INVALID_TEMPORALITY = 0;

    // INSTANTANEOUS is a metric whose values are measured at a particular
    // instant. The values are not aggregated over any time interval and are
    // unique per timestamp. As such, these metrics are not expected to have
    // an associated start time.
    INSTANTANEOUS = 1;

    // DELTA is a metric whose values are the aggregation of measurements
    // made over a time interval. Successive metrics contain aggregation of
    // values from continuous and non-overlapping intervals.
    //
    // The values for a DELTA metric are based only on the time interval
    // associated with one measurement cycle. There is no dependency on
    // previous measurements like is the case for CUMULATIVE metrics.
    //
    // For example, consider a system measuring the number of requests that
    // it receives and reports the sum of these requests every second as a
    // DELTA metric:
    //
    //   1. The system starts receiving at time=t_0.
    //   2. A request is received, the system measures 1 request.
    //   3. A request is received, the system measures 1 request.
    //   4. A request is received, the system measures 1 request.
    //   5. The 1 second collection cycle ends. A metric is exported for the
    //      number of requests received over the interval of time t_0 to
    //      t_0+1 with a value of 3.
    //   6. A request is received, the system measures 1 request.
    //   7. A request is received, the system measures 1 request.
    //   8. The 1 second collection cycle ends. A metric is exported for the
    //      number of requests received over the interval of time t_0+1 to
    //      t_0+2 with a value of 2.
    DELTA = 2;

    // CUMULATIVE is a metric whose values are the aggregation of
    // successively made measurements from a fixed start time until the last
    // reported measurement. This means that current values of a CUMULATIVE
    // metric depend on all previous measurements since the start time.
    // Because of this, the sender is required to retain this state in some
    // form. If this state is lost or invalidated, the CUMULATIVE metric
    // values MUST be reset and a new fixed start time following the last
    // reported measurement time sent MUST be used.
    //
    // For example, consider a system measuring the number of requests that
    // it receives and reports the sum of these requests every second as a
    // CUMULATIVE metric:
    //
    //   1. The system starts receiving at time=t_0.
    //   2. A request is received, the system measures 1 request.
    //   3. A request is received, the system measures 1 request.
    //   4. A request is received, the system measures 1 request.
    //   5. The 1 second collection cycle ends. A metric is exported for the
    //      number of requests received over the interval of time t_0 to
    //      t_0+1 with a value of 3.
    //   6. A request is received, the system measures 1 request.
    //   7. A request is received, the system measures 1 request.
    //   8. The 1 second collection cycle ends. A metric is exported for the
    //      number of requests received over the interval of time t_0 to
    //      t_0+2 with a value of 5.
    //   9. The system experiences a fault and loses state.
    //   10. The system recovers and resumes receiving at time=t_1.
    //   11. A request is received, the system measures 1 request.
    //   12. The 1 second collection cycle ends. A metric is exported for the
    //       number of requests received over the interval of time t_1 to
    //       t_1+1 with a value of 1.
    CUMULATIVE = 3;
  }

  // temporality is the Temporality of values this metric has.
  Temporality temporality = 5;
}
// Int64DataPoint is a single data point in a timeseries that describes the time-varying
// values of a int64 metric.
message Int64DataPoint {
  // The set of labels that uniquely identify this timeseries.
  repeated opentelemetry.proto.common.v1.StringKeyValue labels = 1;

  // start_time_unix_nano is the time when the cumulative value was reset to zero.
  // This is used for Counter type only. For Gauge the value is not specified and
  // defaults to 0.
  //
  // The cumulative value is over the time interval (start_time_unix_nano, time_unix_nano].
  // Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970.
  //
  // Value of 0 indicates that the timestamp is unspecified. In that case the timestamp
  // may be decided by the backend.
  fixed64 start_time_unix_nano = 2;

  // time_unix_nano is the moment when this value was recorded.
  // Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970.
  fixed64 time_unix_nano = 3;

  // value itself.
  int64 value = 4;
}
// DoubleDataPoint is a single data point in a timeseries that describes the time-varying
// value of a double metric.
message DoubleDataPoint {
  // The set of labels that uniquely identify this timeseries.
  repeated opentelemetry.proto.common.v1.StringKeyValue labels = 1;

  // start_time_unix_nano is the time when the cumulative value was reset to zero.
  // This is used for Counter type only. For Gauge the value is not specified and
  // defaults to 0.
  //
  // The cumulative value is over the time interval (start_time_unix_nano, time_unix_nano].
  // Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970.
  //
  // Value of 0 indicates that the timestamp is unspecified. In that case the timestamp
  // may be decided by the backend.
  fixed64 start_time_unix_nano = 2;

  // time_unix_nano is the moment when this value was recorded.
  // Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970.
  fixed64 time_unix_nano = 3;

  // value itself.
  double value = 4;
}
// HistogramDataPoint is a single data point in a timeseries that describes the time-varying
// values of a Histogram. A Histogram contains summary statistics for a population of values,
// it may optionally contain the distribution of those values across a set of buckets.
message HistogramDataPoint {
  // The set of labels that uniquely identify this timeseries.
  repeated opentelemetry.proto.common.v1.StringKeyValue labels = 1;

  // start_time_unix_nano is the time when the cumulative value was reset to zero.
  //
  // The cumulative value is over the time interval (start_time_unix_nano, time_unix_nano].
  // Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970.
  //
  // Value of 0 indicates that the timestamp is unspecified. In that case the timestamp
  // may be decided by the backend.
  // Note: this field is always unspecified and ignored if MetricDescriptor.type==GAUGE_HISTOGRAM.
  // NOTE(review): MetricDescriptor.Type in this file defines no GAUGE_HISTOGRAM
  // value — confirm what the note above is meant to reference.
  fixed64 start_time_unix_nano = 2;

  // time_unix_nano is the moment when this value was recorded.
  // Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970.
  fixed64 time_unix_nano = 3;

  // count is the number of values in the population. Must be non-negative. This value
  // must be equal to the sum of the "count" fields in buckets if a histogram is provided.
  uint64 count = 4;

  // sum of the values in the population. If count is zero then this field
  // must be zero. This value must be equal to the sum of the "sum" fields in buckets if
  // a histogram is provided.
  double sum = 5;

  // Bucket contains values for a bucket.
  message Bucket {
    // The number of values in each bucket of the histogram, as described by
    // bucket_options.
    uint64 count = 1;

    // Exemplars are example points that may be used to annotate aggregated
    // Histogram values. They are metadata that gives information about a
    // particular value added to a Histogram bucket.
    message Exemplar {
      // Value of the exemplar point. It determines which bucket the exemplar belongs to.
      // If bucket_options define bounds for this bucket then this value must be within
      // the defined bounds.
      double value = 1;

      // time_unix_nano is the moment when this exemplar was recorded.
      // Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970.
      fixed64 time_unix_nano = 2;

      // exemplar_attachments are contextual information about the example value.
      // Keys in this list must be unique.
      repeated opentelemetry.proto.common.v1.StringKeyValue attachments = 3;
    }

    // exemplar is an optional representative value of the bucket.
    Exemplar exemplar = 2;
  }

  // buckets is an optional field that contains the values of the histogram for each bucket.
  //
  // The sum of the values in the buckets "count" field must equal the value in the count field.
  //
  // The number of elements in buckets array must be one greater than the
  // number of elements in bucket_bounds array.
  //
  // Note: if HistogramDataPoint.bucket_options defines bucket bounds then this field
  // must also be present and number of elements in this field must be equal to the
  // number of buckets defined by bucket_options.
  repeated Bucket buckets = 6;

  // A histogram may optionally contain the distribution of the values in the population.
  // In that case one of the option fields below and "buckets" field both must be defined.
  // Otherwise all option fields and "buckets" field must be omitted in which case the
  // distribution of values in the histogram is unknown and only the total count and sum are known.

  // explicit_bounds is the only supported bucket option currently.
  // TODO: Add more bucket options.

  // explicit_bounds specifies buckets with explicitly defined bounds for values.
  // The bucket boundaries are described by "bounds" field.
  //
  // This defines size(bounds) + 1 (= N) buckets. The boundaries for bucket
  // at index i are:
  //
  //   [0, bounds[i]) for i == 0
  //   [bounds[i-1], bounds[i]) for 0 < i < N-1
  //   [bounds[i], +infinity) for i == N-1
  //
  // The values in bounds array must be strictly increasing and > 0.
  //
  // Note: only [a, b) intervals are currently supported for each bucket. If we decide
  // to also support (a, b] intervals we should add support for these by defining a boolean
  // value which decides what type of intervals to use.
  repeated double explicit_bounds = 7;
}
// SummaryDataPoint is a single data point in a timeseries that describes the time-varying
// values of a Summary metric.
message SummaryDataPoint {
  // The set of labels that uniquely identify this timeseries.
  repeated opentelemetry.proto.common.v1.StringKeyValue labels = 1;

  // start_time_unix_nano is the time when the cumulative value was reset to zero.
  //
  // The cumulative value is over the time interval (start_time_unix_nano, time_unix_nano].
  // Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970.
  //
  // Value of 0 indicates that the timestamp is unspecified. In that case the timestamp
  // may be decided by the backend.
  fixed64 start_time_unix_nano = 2;

  // time_unix_nano is the moment when this value was recorded.
  // Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970.
  fixed64 time_unix_nano = 3;

  // The total number of recorded values since start_time. Optional since
  // some systems don't expose this.
  uint64 count = 4;

  // The total sum of recorded values since start_time. Optional since some
  // systems don't expose this. If count is zero then this field must be zero.
  double sum = 5;

  // Represents the value at a given percentile of a distribution.
  //
  // To record Min and Max values following conventions are used:
  //   - The 100th percentile is equivalent to the maximum value observed.
  //   - The 0th percentile is equivalent to the minimum value observed.
  //
  // See the following issue for more context:
  // https://github.com/open-telemetry/opentelemetry-proto/issues/125
  message ValueAtPercentile {
    // The percentile of a distribution. Must be in the interval
    // [0.0, 100.0].
    double percentile = 1;

    // The value at the given percentile of a distribution.
    double value = 2;
  }

  // A list of values at different percentiles of the distribution calculated
  // from the current snapshot. The percentiles must be strictly increasing.
  repeated ValueAtPercentile percentile_values = 6;
}

Просмотреть файл

@ -1,34 +0,0 @@
// Copyright 2019, OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto3";
package opentelemetry.proto.resource.v1;
import "opentelemetry/proto/common/v1/common.proto";
option java_multiple_files = true;
option java_package = "io.opentelemetry.proto.resource.v1";
option java_outer_classname = "ResourceProto";
option go_package = "github.com/open-telemetry/opentelemetry-proto/gen/go/resource/v1";
// Resource information.
message Resource {
  // Set of labels that describe the resource.
  repeated opentelemetry.proto.common.v1.KeyValue attributes = 1;

  // dropped_attributes_count is the number of dropped attributes. If the value is 0, then
  // no attributes were dropped.
  uint32 dropped_attributes_count = 2;
}

Просмотреть файл

@ -1,261 +0,0 @@
// Copyright 2019, OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto3";
package opentelemetry.proto.trace.v1;
import "opentelemetry/proto/common/v1/common.proto";
import "opentelemetry/proto/resource/v1/resource.proto";
option java_multiple_files = true;
option java_package = "io.opentelemetry.proto.trace.v1";
option java_outer_classname = "TraceProto";
option go_package = "github.com/open-telemetry/opentelemetry-proto/gen/go/trace/v1";
// A collection of InstrumentationLibrarySpans from a Resource.
message ResourceSpans {
  // The resource for the spans in this message.
  // If this field is not set then no resource info is known.
  opentelemetry.proto.resource.v1.Resource resource = 1;

  // A list of InstrumentationLibrarySpans that originate from a resource.
  repeated InstrumentationLibrarySpans instrumentation_library_spans = 2;
}
// A collection of Spans produced by an InstrumentationLibrary.
message InstrumentationLibrarySpans {
  // The instrumentation library information for the spans in this message.
  // If this field is not set then no library info is known.
  opentelemetry.proto.common.v1.InstrumentationLibrary instrumentation_library = 1;

  // A list of Spans that originate from an instrumentation library.
  repeated Span spans = 2;
}
// Span represents a single operation within a trace. Spans can be
// nested to form a trace tree. Spans may also be linked to other spans
// from the same or different trace and form graphs. Often, a trace
// contains a root span that describes the end-to-end latency, and one
// or more subspans for its sub-operations. A trace can also contain
// multiple root spans, or none at all. Spans do not need to be
// contiguous - there may be gaps or overlaps between spans in a trace.
//
// The next available field id is 17.
message Span {
  // A unique identifier for a trace. All spans from the same trace share
  // the same `trace_id`. The ID is a 16-byte array. An ID with all zeroes
  // is considered invalid.
  //
  // This field is semantically required. Receiver should generate new
  // random trace_id if empty or invalid trace_id was received.
  //
  // This field is required.
  bytes trace_id = 1;

  // A unique identifier for a span within a trace, assigned when the span
  // is created. The ID is an 8-byte array. An ID with all zeroes is considered
  // invalid.
  //
  // This field is semantically required. Receiver should generate new
  // random span_id if empty or invalid span_id was received.
  //
  // This field is required.
  bytes span_id = 2;

  // trace_state conveys information about request position in multiple distributed tracing graphs.
  // It is a trace_state in w3c-trace-context format: https://www.w3.org/TR/trace-context/#tracestate-header
  // See also https://github.com/w3c/distributed-tracing for more details about this field.
  string trace_state = 3;

  // The `span_id` of this span's parent span. If this is a root span, then this
  // field must be empty. The ID is an 8-byte array.
  bytes parent_span_id = 4;

  // A description of the span's operation.
  //
  // For example, the name can be a qualified method name or a file name
  // and a line number where the operation is called. A best practice is to use
  // the same display name at the same call point in an application.
  // This makes it easier to correlate spans in different traces.
  //
  // This field is semantically required to be set to non-empty string.
  // When a null or empty string is received, the receiver may use the string "name"
  // as a replacement. There might be smarter algorithms implemented by the
  // receiver to fix the empty span name.
  //
  // This field is required.
  string name = 5;

  // SpanKind is the type of span. Can be used to specify additional relationships between spans
  // in addition to a parent/child relationship.
  enum SpanKind {
    // Unspecified. Do NOT use as default.
    // Implementations MAY assume SpanKind to be INTERNAL when receiving UNSPECIFIED.
    SPAN_KIND_UNSPECIFIED = 0;

    // Indicates that the span represents an internal operation within an application,
    // as opposed to operations happening at the boundaries. Default value.
    INTERNAL = 1;

    // Indicates that the span covers server-side handling of an RPC or other
    // remote network request.
    SERVER = 2;

    // Indicates that the span describes a request to some remote service.
    CLIENT = 3;

    // Indicates that the span describes a producer sending a message to a broker.
    // Unlike CLIENT and SERVER, there is often no direct critical path latency relationship
    // between producer and consumer spans. A PRODUCER span ends when the message was accepted
    // by the broker while the logical processing of the message might span a much longer time.
    PRODUCER = 4;

    // Indicates that the span describes consumer receiving a message from a broker.
    // Like the PRODUCER kind, there is often no direct critical path latency relationship
    // between producer and consumer spans.
    CONSUMER = 5;
  }

  // Distinguishes between spans generated in a particular context. For example,
  // two spans with the same name may be distinguished using `CLIENT` (caller)
  // and `SERVER` (callee) to identify queueing latency associated with the span.
  SpanKind kind = 6;

  // start_time_unix_nano is the start time of the span. On the client side, this is the time
  // kept by the local machine where the span execution starts. On the server side, this
  // is the time when the server's application handler starts running.
  // Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970.
  //
  // This field is semantically required and it is expected that end_time >= start_time.
  fixed64 start_time_unix_nano = 7;

  // end_time_unix_nano is the end time of the span. On the client side, this is the time
  // kept by the local machine where the span execution ends. On the server side, this
  // is the time when the server application handler stops running.
  // Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970.
  //
  // This field is semantically required and it is expected that end_time >= start_time.
  fixed64 end_time_unix_nano = 8;

  // attributes is a collection of key/value pairs. The value can be a string,
  // an integer, a double or the Boolean values `true` or `false`. Note, global attributes
  // like server name can be set using the resource API. Examples of attributes:
  //
  //     "/http/user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36"
  //     "/http/server_latency": 300
  //     "abc.com/myattribute": true
  //     "abc.com/score": 10.239
  repeated opentelemetry.proto.common.v1.KeyValue attributes = 9;

  // dropped_attributes_count is the number of attributes that were discarded. Attributes
  // can be discarded because their keys are too long or because there are too many
  // attributes. If this value is 0, then no attributes were dropped.
  uint32 dropped_attributes_count = 10;

  // Event is a time-stamped annotation of the span, consisting of user-supplied
  // text description and key-value pairs.
  message Event {
    // time_unix_nano is the time the event occurred.
    fixed64 time_unix_nano = 1;

    // name of the event.
    // This field is semantically required to be set to non-empty string.
    string name = 2;

    // attributes is a collection of attribute key/value pairs on the event.
    repeated opentelemetry.proto.common.v1.KeyValue attributes = 3;

    // dropped_attributes_count is the number of dropped attributes. If the value is 0,
    // then no attributes were dropped.
    uint32 dropped_attributes_count = 4;
  }

  // events is a collection of Event items.
  repeated Event events = 11;

  // dropped_events_count is the number of dropped events. If the value is 0, then no
  // events were dropped.
  uint32 dropped_events_count = 12;

  // A pointer from the current span to another span in the same trace or in a
  // different trace. For example, this can be used in batching operations,
  // where a single batch handler processes multiple requests from different
  // traces or when the handler receives a request from a different project.
  message Link {
    // A unique identifier of a trace that this linked span is part of. The ID is a
    // 16-byte array.
    bytes trace_id = 1;

    // A unique identifier for the linked span. The ID is an 8-byte array.
    bytes span_id = 2;

    // The trace_state associated with the link.
    string trace_state = 3;

    // attributes is a collection of attribute key/value pairs on the link.
    repeated opentelemetry.proto.common.v1.KeyValue attributes = 4;

    // dropped_attributes_count is the number of dropped attributes. If the value is 0,
    // then no attributes were dropped.
    uint32 dropped_attributes_count = 5;
  }

  // links is a collection of Links, which are references from this span to a span
  // in the same or different trace.
  repeated Link links = 13;

  // dropped_links_count is the number of dropped links after the maximum size was
  // enforced. If this value is 0, then no links were dropped.
  uint32 dropped_links_count = 14;

  // An optional final status for this span. Semantically, when Status isn't
  // set it means the span ended without errors and Status.Ok (code = 0) can
  // be assumed.
  Status status = 15;
}
// The Status type defines a logical error model that is suitable for different
// programming environments, including REST APIs and RPC APIs.
message Status {
  // StatusCode mirrors the codes defined at
  // https://github.com/open-telemetry/opentelemetry-specification/blob/master/specification/api-tracing.md#statuscanonicalcode
  //
  // NOTE(review): the value names use PascalCase rather than the conventional
  // SCREAMING_SNAKE_CASE; renaming would change generated code and JSON, so
  // they are intentionally left as-is.
  enum StatusCode {
    Ok = 0;
    Cancelled = 1;
    UnknownError = 2;
    InvalidArgument = 3;
    DeadlineExceeded = 4;
    NotFound = 5;
    AlreadyExists = 6;
    PermissionDenied = 7;
    ResourceExhausted = 8;
    FailedPrecondition = 9;
    Aborted = 10;
    OutOfRange = 11;
    Unimplemented = 12;
    InternalError = 13;
    Unavailable = 14;
    DataLoss = 15;
    Unauthenticated = 16;
  }

  // The status code. This is an optional field. It is safe to assume 0 (OK)
  // when not set.
  StatusCode code = 1;

  // A developer-facing human readable error message.
  string message = 2;
}

Просмотреть файл

@ -1,78 +0,0 @@
// Copyright 2019, OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto3";

package opentelemetry.proto.trace.v1;

option java_multiple_files = true;
option java_package = "io.opentelemetry.proto.trace.v1";
option java_outer_classname = "TraceConfigProto";
// NOTE(review): go_package maps to .../gen/go/collector/trace/v1 although this
// file's package is opentelemetry.proto.trace.v1. The other files in this set
// map the proto package path directly (e.g. trace.proto uses .../gen/go/trace/v1).
// Confirm whether this divergence is intentional.
option go_package = "github.com/open-telemetry/opentelemetry-proto/gen/go/collector/trace/v1";
// Global configuration of the trace service. All fields must be specified, or
// the default (zero) values will be used for each type.
message TraceConfig {
// The global default sampler used to make decisions on span sampling.
oneof sampler {
ConstantSampler constant_sampler = 1;
ProbabilitySampler probability_sampler = 2;
RateLimitingSampler rate_limiting_sampler = 3;
}
// The global default max number of attributes per span.
int64 max_number_of_attributes = 4;
// The global default max number of annotation events per span.
int64 max_number_of_timed_events= 5;
// The global default max number of attributes per timed event.
int64 max_number_of_attributes_per_timed_event = 6;
// The global default max number of link entries per span.
int64 max_number_of_links = 7;
// The global default max number of attributes per span.
int64 max_number_of_attributes_per_link = 8;
}
// Sampler that always makes a constant decision on span sampling.
message ConstantSampler {
// How spans should be sampled:
// - Always off
// - Always on
// - Always follow the parent Span's decision (off if no parent).
enum ConstantDecision {
ALWAYS_OFF = 0;
ALWAYS_ON = 1;
ALWAYS_PARENT = 2;
}
ConstantDecision decision = 1;
}
// Sampler that tries to uniformly sample traces with a given probability.
// The probability of sampling a trace is equal to that of the specified probability.
message ProbabilitySampler {
// The desired probability of sampling. Must be within [0.0, 1.0].
double samplingProbability = 1;
}
// Sampler that tries to sample with a rate per time window.
message RateLimitingSampler {
// Rate per second.
int64 qps = 1;
}

1
third_party/prometheus-cpp поставляемый Submodule

@ -0,0 +1 @@
Subproject commit 27c3d670c66985c0096d378cdd780088b08bc8f4

35
tools/build-benchmark.cmd Normal file
Просмотреть файл

@ -0,0 +1,35 @@
@echo off
REM Build Google Benchmark (test-only dependency) for Debug and Release, x64,
REM using the Visual Studio 2019 toolchain.
set VS_TOOLS_VERSION=vs2019
set CMAKE_GEN="Visual Studio 16 2019"
echo Building Google Benchmark (test only dependency)...
@setlocal ENABLEEXTENSIONS
echo Auto-detecting Visual Studio version...
call "%~dp0\vcvars.cmd"
pushd "%~dp0\.."
set "ROOT=%CD%"
set MAXCPUCOUNT=%NUMBER_OF_PROCESSORS%
set platform=
if not exist "%ROOT%\third_party\benchmark\" (
echo "Google Benchmark library is not available, skipping benchmark build."
REM Fix: the original 'call skip_the_build' tried to execute an external
REM command named skip_the_build; jump to the label instead.
goto skip_the_build
)
cd "%ROOT%\third_party\benchmark\"
set "GOOGLETEST_PATH=%ROOT%\third_party\googletest"
if not exist "build" (
mkdir build
)
cd build
REM Generate the Visual Studio 2019 solution (the vs2019 generator takes -A for x64)
cmake ../ -G %CMAKE_GEN% -Ax64 -DBENCHMARK_ENABLE_TESTING=OFF
set SOLUTION=%ROOT%\third_party\benchmark\build\benchmark.sln
msbuild %SOLUTION% /maxcpucount:%MAXCPUCOUNT% /p:Configuration=Debug /p:Platform=x64
msbuild %SOLUTION% /maxcpucount:%MAXCPUCOUNT% /p:Configuration=Release /p:Platform=x64
REM popd lives after the label so the pushd above is balanced on both paths.
:skip_the_build
popd

26
tools/build-benchmark.sh Executable file
Просмотреть файл

@ -0,0 +1,26 @@
#!/usr/bin/env bash
# Build and install Google Benchmark (test-only dependency) into $BUILD_ROOT
# (default: /tmp/build) from the third_party/benchmark submodule.
# Fix: abort with a non-zero status when any step (cmake/make) fails instead
# of silently continuing and returning success.
set -e
# Switch to workspace root directory first
DIR="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
WORKSPACE_ROOT="$DIR/.."
pushd "$WORKSPACE_ROOT"
BUILD_ROOT=${BUILD_ROOT:-/tmp/build}
[[ ! -d "${BUILD_ROOT}" ]] && mkdir -p "${BUILD_ROOT}" || echo "Output directory already exists: BUILD_ROOT=${BUILD_ROOT}"
BENCHMARK_BUILD_ROOT=${BUILD_ROOT}/benchmark
[[ ! -d "${BENCHMARK_BUILD_ROOT}" ]] && mkdir -p "${BENCHMARK_BUILD_ROOT}" || echo "Output directory already exists: BENCHMARK_BUILD_ROOT=${BENCHMARK_BUILD_ROOT}"
# Build Google Benchmark with given Google Test
export BENCHMARK_SRC_PATH=${WORKSPACE_ROOT}/third_party/benchmark
if [ ! -d "${BENCHMARK_SRC_PATH}" ]; then
echo "Google Benchmark not found!"
exit 1
fi
pushd "${BENCHMARK_BUILD_ROOT}"
cmake "$BENCHMARK_SRC_PATH" -DBENCHMARK_ENABLE_TESTING=OFF -DCMAKE_INSTALL_PREFIX="$BUILD_ROOT"
make install
popd
popd

69
tools/build-docker.cmd Normal file
Просмотреть файл

@ -0,0 +1,69 @@
@echo off
REM Build a docker image from ../docker/[image_name] and run the OpenTelemetry
REM build inside it. First argument is the image name; remaining arguments are
REM forwarded to the container command (default: /build/tools/build.sh clean).
pushd %~dp0
REM Default arguments
set A1="/build/tools/build.sh"
set A2=clean
if "%~1"=="" goto help
REM Image name is always the first argument
set IMAGE_NAME=%1
REM Process other optional arguments
shift
if NOT "%1"=="" (
set A1=%1
set A2=%2
REM NOTE(review): only A1 and A2 are ever passed to 'docker run' below, so
REM A3..A10 are currently dead. Also, cmd has no %10 parameter: "%10" expands
REM as %1 followed by a literal "0" — confirm intent before relying on it.
set A3=%3
set A4=%4
set A5=%5
set A6=%6
set A7=%7
set A8=%8
set A9=%9
set A10=%10
)
REM NOTE(review): when chocolatey is missing this only prints a message and
REM continues; the 'choco install' calls below will then fail.
WHERE choco >NUL 2>NUL
if %ERRORLEVEL% NEQ 0 (
echo This script requires Chocolatey to install Docker: https://chocolatey.org/
)
WHERE docker >NUL 2>NUL
if "%ERRORLEVEL%"=="0" goto docker_ok
choco install -y docker-desktop
choco install -y docker-cli
:docker_ok
REM Print docker diagnostics and verify the Docker service is present.
docker info
docker version
echo Running in container %IMAGE_NAME%
sc query com.docker.service
REM Force reinstallation of build tools
del ..\.buildtools 2>NUL
echo Building docker image in %CD%...
REM Copy the helper scripts into the image build context.
copy /Y setup-cmake.sh ..\docker\%IMAGE_NAME%\
copy /Y setup-protobuf.sh ..\docker\%IMAGE_NAME%\
docker build --rm -t %IMAGE_NAME% ../docker/%IMAGE_NAME%
cd ..
echo Starting build...
REM Mount the workspace root at /build inside the container.
docker run -it -v %CD%:/build %IMAGE_NAME% %A1% %A2%
popd
REM NOTE(review): bare 'exit' terminates the whole cmd session, not just this
REM script — 'exit /b' may be intended if this is ever called from another script.
exit
:help
cd ..
echo.
echo Usage: build-docker.cmd [image_name] [arguments...]
echo.
echo Default command:
echo.
echo docker run -it -v $WORKSPACE_ROOT:/build IMAGE_NAME %A1% %A2%
echo.
echo Supported images:
echo =================
dir /B docker
popd

37
tools/build-gtest.sh Executable file
Просмотреть файл

@ -0,0 +1,37 @@
#!/usr/bin/env bash
# Build and install Google Test into $BUILD_ROOT (default: /tmp/build).
# Prefers the third_party/googletest submodule; falls back to the OS-packaged
# sources in /usr/src/gtest.
# Fix: abort with a non-zero status when cmake/make fail instead of silently
# continuing and returning success.
set -e
# Switch to workspace root directory first
DIR="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
WORKSPACE_ROOT="$DIR/.."
pushd "$WORKSPACE_ROOT"
BUILD_ROOT=${BUILD_ROOT:-/tmp/build}
[[ ! -d "${BUILD_ROOT}" ]] && mkdir -p "${BUILD_ROOT}" || echo "Output directory already exists: BUILD_ROOT=${BUILD_ROOT}"
GTEST_BUILD_ROOT=${BUILD_ROOT}/gtest
[[ ! -d "${GTEST_BUILD_ROOT}" ]] && mkdir -p "${GTEST_BUILD_ROOT}" || echo "Output directory already exists: GTEST_BUILD_ROOT=${GTEST_BUILD_ROOT}"
# Path to Google Test source. Prefer Google Test from submodule if available:
export GTEST_SRC_PATH=${WORKSPACE_ROOT}/third_party/googletest
if [ ! -d "${GTEST_SRC_PATH}" ]; then
# If not available, then use Google Test that is installed by OS package:
export GTEST_SRC_PATH=/usr/src/gtest
if [ ! -d "${GTEST_SRC_PATH}" ]; then
echo "GTest not found!"
exit 1
fi
echo Building GTest from source: ${GTEST_SRC_PATH} ...
fi
pushd "$GTEST_BUILD_ROOT"
# -fPIC so the static gtest libraries can link into shared objects;
# -Wno-return-type silences warnings from older gtest sources.
cmake -Dgtest_build_samples=OFF \
  -Dgmock_build_samples=OFF \
  -Dgtest_build_tests=OFF \
  -Dgmock_build_tests=OFF \
  -DCMAKE_CXX_FLAGS="-fPIC $CXX_FLAGS -Wno-return-type" \
  -DCMAKE_INSTALL_PREFIX="$BUILD_ROOT" \
  "${GTEST_SRC_PATH}"
make install
popd
popd

24
tools/build-vcpkg.sh Executable file
Просмотреть файл

@ -0,0 +1,24 @@
#!/bin/bash
# Bootstrap vcpkg from the tools/vcpkg submodule (if needed), install the
# build dependencies through it, then run the main build with the vcpkg
# CMake toolchain file.
# Fix: stop immediately when bootstrap or any 'vcpkg install' fails instead
# of continuing into a build that is guaranteed to miss dependencies.
set -e
export PATH=/usr/local/bin:$PATH
DIR="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
WORKSPACE_ROOT=$DIR/..
export VCPKG_ROOT=$WORKSPACE_ROOT/tools/vcpkg
export PATH=$VCPKG_ROOT:$PATH
# Build the vcpkg binary on first use.
if [[ ! -f $DIR/vcpkg/vcpkg ]] ; then
pushd "$DIR/vcpkg"
./bootstrap-vcpkg.sh
popd
fi
# Install dependencies used by the SDK build and tests.
vcpkg install gtest
vcpkg install benchmark
vcpkg install ms-gsl
vcpkg install nlohmann-json
vcpkg install abseil
vcpkg install protobuf
cd "$WORKSPACE_ROOT"
export USE_VCPKG=1
# $1 - build configuration (default: nostd)
# $2 - extra cmake option (default: the vcpkg toolchain file)
./tools/build.sh ${1-nostd} ${2--DCMAKE_TOOLCHAIN_FILE=$WORKSPACE_ROOT/tools/vcpkg/scripts/buildsystems/vcpkg.cmake}

11
tools/build-vs2015.cmd Normal file
Просмотреть файл

@ -0,0 +1,11 @@
REM Build with Visual Studio 2015
REM Optional 1st argument overrides the target architecture (default: Win64).
set "VS_TOOLS_VERSION=vs2015"
set ARCH=Win64
if NOT "%1"=="" (
set ARCH=%1
)
REM The vs2015 CMake generator encodes the architecture in the generator name.
set "CMAKE_GEN=Visual Studio 14 2015 %ARCH%"
cd %~dp0
call setup-buildtools.cmd
REM TODO: currently we cannot build without Abseil variant for Visual Studio 2015
call build.cmd -DWITH_ABSEIL:BOOL=ON

10
tools/build-vs2017.cmd Normal file
Просмотреть файл

@ -0,0 +1,10 @@
REM Build with Visual Studio 2017
REM Optional 1st argument overrides the target architecture (default: Win64).
set "VS_TOOLS_VERSION=vs2017"
set ARCH=Win64
if NOT "%1"=="" (
set ARCH=%1
)
REM The vs2017 CMake generator encodes the architecture in the generator name.
set "CMAKE_GEN=Visual Studio 15 2017 %ARCH%"
cd %~dp0
call setup-buildtools.cmd
call build.cmd

15
tools/build-vs2019.cmd Normal file
Просмотреть файл

@ -0,0 +1,15 @@
REM Build with Visual Studio 2019
REM Optional 1st argument overrides the target architecture (default: Win64).
set "VS_TOOLS_VERSION=vs2019"
set ARCH=Win64
if NOT "%1"=="" (
set ARCH=%1
)
if "%ARCH%"=="Win64" (
REM Parameter needed for CMake Visual Studio 2019 generator
REM (the vs2019 generator takes the architecture via -A, consumed by build.cmd)
set CMAKE_ARCH=x64
)
set "CMAKE_GEN=Visual Studio 16 2019"
cd %~dp0
call setup-buildtools.cmd
call build.cmd

80
tools/build.cmd Normal file
Просмотреть файл

@ -0,0 +1,80 @@
@echo off
REM Build the SDK (nostd, and STL for vs2017+) on Windows via CMake + MSBuild.
REM Currently we require Visual Studio 2019 for C++20 build targeting Release/x64.
REM
REM TODO: allow specifying compiler version as argument.
REM
REM Supported versions for nostd build:
REM - vs2015 (C++11)
REM - vs2017 (C++14)
REM - vs2019 (C++20)
REM
REM Supported versions for STL build:
REM - vs2017 (C++14)
REM - vs2019 (C++20)
REM
if "%VS_TOOLS_VERSION%" == "" set "VS_TOOLS_VERSION=vs2019"
if "%CMAKE_GEN%" == "" set "CMAKE_GEN=Visual Studio 16 2019"
pushd %~dp0
setlocal enableextensions
setlocal enabledelayedexpansion
set "ROOT=%~dp0\.."
REM Default to x64 when the caller did not set CMAKE_ARCH.
REM Fix: the original 'if ("%CMAKE_ARCH%"=="")' compared the literal strings
REM '("%CMAKE_ARCH%"' and '")' — always false, so the default was never applied.
if "%CMAKE_ARCH%"=="" (
set CMAKE_ARCH=x64
)
REM Use preinstalled vcpkg if installed or use our local
if "%VCPKG_INSTALLATION_ROOT%" neq "" (
set "VCPKG_CMAKE=%VCPKG_INSTALLATION_ROOT%\scripts\buildsystems\vcpkg.cmake"
) else (
set "VCPKG_CMAKE=%CD%\vcpkg\scripts\buildsystems\vcpkg.cmake"
)
REM ********************************************************************
REM Setup compiler environment
REM ********************************************************************
call "%~dp0\vcvars.cmd"
REM ********************************************************************
REM Use cmake
REM ********************************************************************
set "PATH=%PATH%;C:\Program Files\CMake\bin\"
REM ********************************************************************
REM Build with nostd implementation
REM ********************************************************************
set CONFIG=-DWITH_STL:BOOL=OFF %*
set "OUTDIR=%ROOT%\out\%VS_TOOLS_VERSION%\nostd"
call :build_config
REM ********************************************************************
REM Build with STL implementation only for vs2017+
REM ********************************************************************
if "%VS_TOOLS_VERSION%" neq "vs2015" (
set CONFIG=-DWITH_STL:BOOL=ON %*
set "OUTDIR=%ROOT%\out\%VS_TOOLS_VERSION%\stl"
call :build_config
)
popd
REM Fix: return here; the original fell through into :build_config below and
REM performed a spurious extra build of the last configuration.
exit /b 0
REM ********************************************************************
REM ********************************************************************
REM Function that allows to build given build configuration
REM Inputs: OUTDIR (build directory), CONFIG (extra cmake options),
REM         CMAKE_GEN, CMAKE_ARCH, VCPKG_CMAKE.
REM ********************************************************************
:build_config
REM TODO: consider rmdir for clean builds
if not exist "%OUTDIR%" mkdir "%OUTDIR%"
cd "%OUTDIR%"
REM Fix: same always-false parenthesized comparison as above — the -A flag
REM was never passed to the vs2019 generator.
if "%VS_TOOLS_VERSION%"=="vs2019" (
REM Only latest vs2019 generator supports and requires -A parameter
cmake %ROOT% -G "%CMAKE_GEN%" -A %CMAKE_ARCH% -DCMAKE_TOOLCHAIN_FILE="%VCPKG_CMAKE%" %CONFIG%
) else (
cmake %ROOT% -G "%CMAKE_GEN%" -DCMAKE_TOOLCHAIN_FILE="%VCPKG_CMAKE%" %CONFIG%
)
set "SOLUTION=%OUTDIR%\opentelemetry-cpp.sln"
msbuild "%SOLUTION%" /p:Configuration=Release /p:VcpkgEnabled=true
exit /b 0

187
tools/build.sh Executable file
Просмотреть файл

@ -0,0 +1,187 @@
#!/bin/bash
# Top-level build driver for the OpenTelemetry C++ SDK on Linux and macOS.
export PATH=/usr/local/bin:$PATH
##
## Install all build tools and dependencies on Mac
##
# Uses the $BUILDTOOLS_FILE marker (set by install_tools below) so the slow
# toolchain installation runs only once per workspace.
function install_mac_tools {
if [ ! -f $BUILDTOOLS_FILE ] ; then
$WORKSPACE_ROOT/tools/setup-buildtools-mac.sh
echo > $BUILDTOOLS_FILE
else
echo "Build tools already installed. Skipping build tools installation."
fi
}
##
## Install all build tools and dependencies on Linux
##
# Same marker logic as install_mac_tools; the setup script requires root.
function install_linux_tools {
if [ ! -f $BUILDTOOLS_FILE ] ; then
sudo $WORKSPACE_ROOT/tools/setup-buildtools.sh
echo > $BUILDTOOLS_FILE
else
echo "Build tools already installed. Skipping build tools installation."
fi
}
##
## Build dependencies
##
# Builds test-only dependencies (Google Benchmark, Google Test) into
# $BUILD_ROOT via the helper scripts in tools/.
function build_dependencies {
# Build Google Benchmark
$WORKSPACE_ROOT/tools/build-benchmark.sh
# Build Google Test
$WORKSPACE_ROOT/tools/build-gtest.sh
}
##
## Build specific configuration for a given platform
##
## Inputs (set by the caller): WORKSPACE_ROOT, PLATFORM_NAME, BUILD_CONFIG,
## BUILD_OPTIONS, and optionally USE_VCPKG / VCPKG_ROOT.
## Artifacts are produced under out/$PLATFORM_NAME/$BUILD_CONFIG.
##
function build {
echo "Build configuration: $BUILD_CONFIG"
cd $WORKSPACE_ROOT
export BUILD_ROOT=`pwd`/out/$PLATFORM_NAME/$BUILD_CONFIG
mkdir -p $BUILD_ROOT
if [ ! -w $BUILD_ROOT ] ; then
echo "Unable to create output directory: $BUILD_ROOT"
exit 1
fi
if [ -z ${USE_VCPKG} ] ; then
# TODO: consider that dependencies may also be coming from OS or brew
build_dependencies
else
echo VCPKG_ROOT=${VCPKG_ROOT}
# Prefer ninja from VCPKG if available.
# Fix: compute the relative path first and only prepend WORKSPACE_ROOT when
# the binary was actually found. Previously an empty `find` result produced
# "$WORKSPACE_ROOT/" — a non-empty directory path mistaken for a valid
# ninja executable, so the `which ninja` fallback never ran.
NINJA_PATH=`find tools/vcpkg -name ninja -type f -print -quit`
if [ -n "${NINJA_PATH}" ] ; then
NINJA=$WORKSPACE_ROOT/${NINJA_PATH}
else
NINJA=`which ninja`
fi
fi
# Build OpenTelemetry SDK: use Ninja when available, plain make otherwise.
pushd $BUILD_ROOT
if [ -z ${NINJA} ] ; then
cmake $BUILD_OPTIONS $WORKSPACE_ROOT
make
else
cmake -G "Ninja" $BUILD_OPTIONS $WORKSPACE_ROOT
echo Building with NINJA=$NINJA
$NINJA
fi
popd
}
# Run the test suite via CTest from the build directory ($BUILD_ROOT,
# exported by build()).
function runtests {
pushd $BUILD_ROOT
ctest
popd
}
##
## Clean
##
# Remove configure/build outputs and the build-tools marker so the next run
# starts from scratch (runs from the workspace root).
function clean {
rm -f CMakeCache.txt *.cmake
rm -rf out
rm -rf .buildtools
# make clean
}
##
## Detect compiler
##
## Computes PLATFORM_NAME as "<machine>-<compiler>-<version>", used to key the
## per-platform output directory. Honors a caller-provided ${CC}; otherwise
## prefers gcc on non-Apple hosts and clang on Apple hosts.
##
function detect_compiler {
if [ -z "${CC}" ] ; then
# Compiler autodetection
if [ -z "${APPLE}" ] ; then
# Prefer gcc for non-Apple
if [ -f /usr/bin/gcc ] ; then
echo "gcc version: `gcc --version`"
PLATFORM_NAME=`gcc -dumpmachine`-gcc-`gcc -dumpversion`
fi
else
# Prefer clang on Apple platforms
if [ -f /usr/bin/clang ] ; then
echo "clang version: `clang --version`"
PLATFORM_NAME=`clang -dumpmachine`-clang-`clang -dumpversion`
fi
fi
else
# Use compiler specified by ${CC} environment variable.
# Fix: 'read' takes variable *names*; the original passed "$PLATFORM_NAME"
# and "$COMPILER_VERSION" (their values), so the split never populated them.
IFS=- read PLATFORM_NAME COMPILER_VERSION <<< "${CC}"
echo "CC version: `${CC} --version`"
PLATFORM_NAME=$PLATFORM_NAME-`${CC} -dumpversion`
fi
if [ -z "${PLATFORM_NAME}" ] ; then
# Default configuration name for unknown compiler
# could be overridden by setting env var explicitly
PLATFORM_NAME=unknown-0
fi
}
##
## Detect Host OS, install tools and detect compiler
##
# Sets APPLE=1 or LINUX=1 from `uname`, installs build tools (unless NOROOT
# is set), then resolves PLATFORM_NAME via detect_compiler.
function install_tools {
# Marker file to signal that the tools have been already installed (save build time for incremental builds)
BUILDTOOLS_FILE=`pwd`/.buildtools
# Host OS detection
OS_NAME=`uname -a`
case "$OS_NAME" in
*Darwin*)
export APPLE=1
# Set target MacOS minver
export MACOSX_DEPLOYMENT_TARGET=10.10
install_mac_tools ;;
*Linux*)
export LINUX=1
[[ -z "$NOROOT" ]] && install_linux_tools || echo "No root. Skipping build tools installation." ;;
*)
echo "WARNING: unsupported OS $OS_NAME. Skipping build tools installation." ;;
esac
detect_compiler
}
##
## Parse arguments
##
# Sets BUILD_TYPE from $1: "release" selects a release build, anything else
# (including no argument) selects "debug".
# NOTE(review): callers must forward the script arguments (parse_args "$@")
# for the selection to take effect — verify the invocation site.
function parse_args {
# Build debug build by default
if [ "$1" == "release" ] ; then
BUILD_TYPE="release"
else
BUILD_TYPE="debug"
fi
}
################################################################################################################
## Entry point: switch to the workspace root, install tools, then build and
## test the requested configuration.
## Usage: build.sh [build-config] [extra cmake options...]
DIR="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
WORKSPACE_ROOT=$DIR/..
cd $WORKSPACE_ROOT
echo "Current directory is `pwd`"
# Parse command line arguments.
# Fix: forward the script arguments — the original called parse_args with no
# arguments, so the release/debug selection could never take effect.
parse_args "$@"
# Install the necessary build tools if needed
[[ -z "$NOROOT" ]] && install_tools || echo "No root: skipping build tools installation."
# Build given configuration. Default configuration is ABI-stable 'nostd::' classes.
# Please refer to CMakeLists.txt for the list of supported build configurations.
BUILD_CONFIG=${1-nostd}
shift
BUILD_OPTIONS="$@"
build
runtests

3
tools/download.cmd Normal file
Просмотреть файл

@ -0,0 +1,3 @@
@REM This script allows to download a file to local machine. First argument is URL.
@REM The file is saved into the current directory under its URL basename.
@REM NOTE(review): relies on download.ps1 being in the current directory
@REM ("Download.ps1" resolves case-insensitively on Windows) — callers cd here first.
set "PATH=%SystemRoot%;%SystemRoot%\System32;%SystemRoot%\System32\WindowsPowerShell\v1.0\;%ProgramFiles%\Git\bin"
@powershell -File Download.ps1 %1

4
tools/download.ps1 Normal file
Просмотреть файл

@ -0,0 +1,4 @@
# Download the file given by the first script argument (a URL) into the
# current directory, using the last URL path segment as the local file name.
$sourceUrl = $args[0]
$localName = $sourceUrl.Split("/")[-1]
Invoke-WebRequest -Uri $sourceUrl -OutFile $localName -UseBasicParsing

Просмотреть файл

@ -6,7 +6,7 @@ fi
set -e
FIND="find . -name .git -prune -o -name _deps -prune -o -name .build -prune -o"
FIND="find . -name third_party -prune -o -name tools -prune -o -name .git -prune -o -name _deps -prune -o -name .build -prune -o -name out -prune -o"
# GNU syntax.
SED=(sed -i)

0
tools/git-cl.sh Normal file → Executable file
Просмотреть файл

Просмотреть файл

@ -0,0 +1,39 @@
REM Install the LLVM toolchain and the optional Visual Studio ATL/ARM
REM components plus the LLVM toolchain VSIX for every detected VS installation.
set "PATH=%SystemRoot%;%SystemRoot%\System32;%SystemRoot%\System32\WindowsPowerShell\v1.0\;%ProgramFiles%\Git\bin"
cd %~dp0
call powershell -File .\install_llvm-win64.ps1
REM Download Visual Studio LLVM extension required for clang build to succeed
call download.cmd https://llvmextensions.gallerycdn.vsassets.io/extensions/llvmextensions/llvm-toolchain/1.0.363769/1560930595399/llvm.vsix
REM Install optional components required for ARM build - vs2017-BuildTools
IF EXIST "%ProgramFiles(x86)%\Microsoft Visual Studio\2017\BuildTools" (
"%ProgramFiles(x86)%\Microsoft Visual Studio\Installer\vs_installer.exe" ^
modify --installPath "%ProgramFiles(x86)%\Microsoft Visual Studio\2017\BuildTools" -q ^
--add Microsoft.VisualStudio.Component.VC.ATL ^
--add Microsoft.VisualStudio.Component.VC.ATL.ARM ^
--add Microsoft.VisualStudio.Component.VC.ATL.ARM64
"%ProgramFiles(x86)%\Microsoft Visual Studio\2017\BuildTools\Common7\IDE\VSIXInstaller.exe" /q /a llvm.vsix
)
REM Install optional components required for ARM build - vs2017-Enterprise
IF EXIST "%ProgramFiles(x86)%\Microsoft Visual Studio\2017\Enterprise" (
"%ProgramFiles(x86)%\Microsoft Visual Studio\Installer\vs_installer.exe" ^
modify --installPath "%ProgramFiles(x86)%\Microsoft Visual Studio\2017\Enterprise" -q ^
--add Microsoft.VisualStudio.Component.VC.ATL ^
--add Microsoft.VisualStudio.Component.VC.ATL.ARM ^
--add Microsoft.VisualStudio.Component.VC.ATL.ARM64
"%ProgramFiles(x86)%\Microsoft Visual Studio\2017\Enterprise\Common7\IDE\VSIXInstaller.exe" /q /a llvm.vsix
)
REM Install optional components required for ARM build - vs2019-Enterprise
REM Fix: quote the path — it contains spaces and parentheses, which broke the
REM unquoted IF EXIST test; also use %ProgramFiles(x86)% instead of the
REM hardcoded "C:\Program Files (x86)" for consistency with the blocks above.
IF EXIST "%ProgramFiles(x86)%\Microsoft Visual Studio\2019\Enterprise" (
"%ProgramFiles(x86)%\Microsoft Visual Studio\Installer\vs_installer.exe" ^
modify --installPath "%ProgramFiles(x86)%\Microsoft Visual Studio\2019\Enterprise" -q ^
--add Microsoft.VisualStudio.Component.VC.ATL ^
--add Microsoft.VisualStudio.Component.VC.ATL.ARM ^
--add Microsoft.VisualStudio.Component.VC.ATL.ARM64
"%ProgramFiles(x86)%\Microsoft Visual Studio\2019\Enterprise\Common7\IDE\VSIXInstaller.exe" /q /a llvm.vsix
)
REM Ignore failures if components have been already installed
EXIT /b 0

13
tools/install.sh Normal file
Просмотреть файл

@ -0,0 +1,13 @@
#!/bin/bash
# Install the OpenTelemetry C++ headers (and a placeholder lib stub) to the
# given prefix.
# $1 - install prefix (default: /usr/local)
DIR="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
WORKSPACE_ROOT=$DIR/..
pushd "$WORKSPACE_ROOT"
OPENTELEMETRY_INSTALL_DIR=${1-/usr/local}
echo "Install SDK to $OPENTELEMETRY_INSTALL_DIR"
# Fix: the original created $OPENTELEMETRY_INSTALL_DIR/lib twice; one mkdir is enough.
mkdir -p "$OPENTELEMETRY_INSTALL_DIR/lib"
# TODO: install libraries?
cp -R api/include "$OPENTELEMETRY_INSTALL_DIR"
cp -R sdk/include/opentelemetry/sdk "$OPENTELEMETRY_INSTALL_DIR/include/opentelemetry"
# Placeholder so package managers see a lib/ artifact even in header-only installs.
echo dummy > "$OPENTELEMETRY_INSTALL_DIR/lib/libopentelemetry.stub"
popd

Просмотреть файл

@ -0,0 +1,8 @@
# Download and silently install the 32-bit LLVM 10.0.0 toolchain from the
# official llvm-project GitHub release.
$llvmVersion = "10.0.0"
Write-Host "Installing LLVM $llvmVersion ..." -ForegroundColor Cyan
Write-Host "Downloading..."
# Installer is staged in %TEMP%
$exePath = "$env:temp\LLVM-$llvmVersion-win32.exe"
(New-Object Net.WebClient).DownloadFile("https://github.com/llvm/llvm-project/releases/download/llvmorg-$llvmVersion/LLVM-$llvmVersion-win32.exe", $exePath)
Write-Host "Installing..."
# /S = silent NSIS install; 'start /wait' blocks until the installer exits
cmd /c start /wait $exePath /S
Write-Host "Installed" -ForegroundColor Green

Просмотреть файл

@ -0,0 +1,8 @@
# Download and silently install the 64-bit LLVM 10.0.0 toolchain from the
# official llvm-project GitHub release.
$llvmVersion = "10.0.0"
Write-Host "Installing LLVM $llvmVersion ..." -ForegroundColor Cyan
Write-Host "Downloading..."
$installerPath = Join-Path $env:temp "LLVM-$llvmVersion-win64.exe"
$downloadUrl = "https://github.com/llvm/llvm-project/releases/download/llvmorg-$llvmVersion/LLVM-$llvmVersion-win64.exe"
(New-Object Net.WebClient).DownloadFile($downloadUrl, $installerPath)
Write-Host "Installing..."
# /S = silent NSIS install; 'start /wait' blocks until the installer exits
cmd /c start /wait $installerPath /S
Write-Host "Installed" -ForegroundColor Green

Просмотреть файл

@ -0,0 +1,5 @@
Source: benchmark
Version: 1.5.1
Homepage: https://github.com/google/benchmark
Description: A library to support the benchmarking of functions, similar to unit-tests.
Supports: !uwp

Просмотреть файл

@ -0,0 +1,49 @@
# vcpkg portfile for Google Benchmark (see the accompanying CONTROL file).
if(VCPKG_CMAKE_SYSTEM_NAME STREQUAL "WindowsStore")
message(FATAL_ERROR "${PORT} does not currently support UWP")
endif()
# v140 (VS2015) toolset: force the classic cl.exe toolchain.
# NOTE(review): these set() calls affect only this portfile's scope — confirm
# they actually influence the configure step below, since options are normally
# passed through vcpkg_configure_cmake OPTIONS.
if (VCPKG_PLATFORM_TOOLSET STREQUAL "v140")
# set(CMAKE_C_COMPILER_WORKS 1)
# set(CMAKE_CXX_COMPILER_WORKS 1)
set(CMAKE_C_COMPILER cl.exe)
set(CMAKE_CXX_COMPILER cl.exe)
set(MSVC_TOOLSET_VERSION 140)
# set(VCPKG_VISUAL_STUDIO_PATH "C:\\Program Files (x86)\\Microsoft Visual Studio 14.0")
# set(VCPKG_PLATFORM_TOOLSET v140)
else()
# Make sure vs2019 compiled binaries are compat with vs2017
set(VCPKG_CXX_FLAGS "/Zc:__cplusplus /d2FH4-")
set(VCPKG_C_FLAGS "/Zc:__cplusplus /d2FH4-")
set(PREFER PREFER_NINJA)
endif()
# NOTE(review): include(vcpkg_common_functions) is deprecated in newer vcpkg
# releases (helpers load automatically) — verify the minimum supported vcpkg.
include(vcpkg_common_functions)
vcpkg_check_linkage(ONLY_STATIC_LIBRARY)
# NOTE(review): no REF/SHA512 is given, so this fetches the current master
# HEAD rather than the version pinned in CONTROL — intended only for
# '--head' installs; confirm reproducibility requirements.
vcpkg_from_github(
OUT_SOURCE_PATH SOURCE_PATH
REPO google/benchmark
HEAD_REF master
)
vcpkg_configure_cmake(
SOURCE_PATH ${SOURCE_PATH}
${PREFER}
OPTIONS
-DBENCHMARK_ENABLE_TESTING=OFF
-DCMAKE_DEBUG_POSTFIX=d
)
vcpkg_install_cmake()
vcpkg_copy_pdbs()
vcpkg_fixup_cmake_targets(CONFIG_PATH lib/cmake/benchmark)
# Debug tree must not ship headers or share files (vcpkg policy).
file(REMOVE_RECURSE ${CURRENT_PACKAGES_DIR}/debug/include)
file(REMOVE_RECURSE ${CURRENT_PACKAGES_DIR}/debug/share)
# Handle copyright
file(COPY ${SOURCE_PATH}/LICENSE DESTINATION ${CURRENT_PACKAGES_DIR}/share/benchmark)
file(RENAME ${CURRENT_PACKAGES_DIR}/share/benchmark/LICENSE ${CURRENT_PACKAGES_DIR}/share/benchmark/copyright)

Просмотреть файл

@ -0,0 +1,10 @@
Source: opentelemetry
Version: 0.0.1
Homepage: https://github.com/open-telemetry/opentelemetry-cpp
Description: OpenTelemetry C++ SDK
Feature: stdlib
Description: Build OpenTelemetry with Standard Library classes
Feature: abseil
Description: Build OpenTelemetry with Abseil classes

Просмотреть файл

@ -0,0 +1,7 @@
# TODO
- Consider adding the following dependency line to this port's CONTROL file
  (note: `Build-Depends` is a CONTROL metadata field, not a `portfile.cmake` setting):
```
Build-Depends: curl[ssl], nlohmann-json
```

Просмотреть файл

@ -0,0 +1,6 @@
#!/bin/bash
# Print the repository name: the basename (without the .git suffix) of the
# 'origin' remote URL of the checkout two directories above this script.
set -e
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
pushd "$SCRIPT_DIR/../../" > /dev/null
origin_url=$(git config --get remote.origin.url)
basename -s .git "$origin_url"
popd > /dev/null

Просмотреть файл

@ -0,0 +1,76 @@
# vcpkg portfile for the OpenTelemetry C++ SDK port.
# NOTE(review): include(vcpkg_common_functions) is deprecated in newer vcpkg
# releases (helpers load automatically) — verify the minimum supported vcpkg.
include(vcpkg_common_functions)
# Diagnostic dump of the vcpkg environment (useful when debugging the port).
message("CURRENT_PACKAGES_DIR = ${CURRENT_PACKAGES_DIR}")
message("CMAKE_CURRENT_SOURCE_DIR = ${CMAKE_CURRENT_SOURCE_DIR}")
message("CMAKE_MODULE_PATH = ${CMAKE_MODULE_PATH}")
message("CMAKE_CURRENT_LIST_DIR = ${CMAKE_CURRENT_LIST_DIR}")
message("VCPKG_TARGET_TRIPLET = ${VCPKG_TARGET_TRIPLET}")
message("VCPKG_CMAKE_SYSTEM_NAME = ${VCPKG_CMAKE_SYSTEM_NAME}")
message("VCPKG_LIBRARY_LINKAGE = ${VCPKG_LIBRARY_LINKAGE}")
# Map port features (CONTROL file) to SDK CMake options.
# NOTE(review): newer vcpkg requires a FEATURES keyword before the feature
# list in vcpkg_check_features — verify against the pinned vcpkg submodule.
vcpkg_check_features(
OUT_FEATURE_OPTIONS FEATURE_OPTIONS
stdlib WITH_STDLIB
abseil WITH_ABSEIL
)
# TODO: if building dynamic, use portable ABI. if building static, use STDLIB
string(COMPARE EQUAL "${VCPKG_LIBRARY_LINKAGE}" "dynamic" BUILD_SHARED)
string(COMPARE EQUAL "${VCPKG_LIBRARY_LINKAGE}" "static" BUILD_STATIC)
# WIN32 is only defined on Windows hosts, so the shell helper runs on POSIX only.
if (NOT DEFINED WIN32)
execute_process(COMMAND "${CMAKE_CURRENT_LIST_DIR}/get_repo_name.sh" OUTPUT_VARIABLE REPO_NAME ERROR_QUIET)
message("REPO_NAME=${REPO_NAME}")
endif()
if (DEFINED REPO_NAME)
# Use local snapshot since we already cloned the code
get_filename_component(SOURCE_PATH "${CMAKE_CURRENT_LIST_DIR}/../../../" ABSOLUTE)
message("Using local source snapshot from ${SOURCE_PATH}")
else()
# Fetch from GitHub master
message("Fetching source code from GitHub...")
vcpkg_from_github(
OUT_SOURCE_PATH SOURCE_PATH
REPO open-telemetry/opentelemetry-cpp
HEAD_REF master
)
endif()
# TODO: it will be slightly cleaner to perform pure CMake or Ninja build, by describing all possible variable options
# as separate triplets. Since we have a fairly non-trivial build logic in build.sh script - we use it as-is for now.
# build.sh itself should check if we are building under vcpkg and avoid installing deps that are coming from vcpkg.
if (UNIX)
# Tell build.sh that dependencies come from vcpkg and no root setup is needed.
set(ENV{USE_VCPKG} 1)
set(ENV{NOROOT} 1)
# Custom options could be passed to COMMAND, e.g.
# vcpkg_execute_build_process(
#   COMMAND ${SOURCE_PATH}/tools/build.sh nostd-vcpkg -DBUILD_TESTING=OFF
# ...
#
vcpkg_execute_build_process(
COMMAND ${SOURCE_PATH}/tools/build.sh nostd-vcpkg ${FEATURE_OPTIONS}
WORKING_DIRECTORY ${SOURCE_PATH}/
LOGNAME build
)
# Install headers (and stub lib) into the vcpkg packages tree.
vcpkg_execute_build_process(
COMMAND ${SOURCE_PATH}/tools/install.sh ${CURRENT_PACKAGES_DIR}
WORKING_DIRECTORY ${SOURCE_PATH}/
LOGNAME install
)
else()
# TODO: verify Windows build
vcpkg_execute_build_process(
COMMAND ${SOURCE_PATH}/tools/build.cmd
WORKING_DIRECTORY ${SOURCE_PATH}/
LOGNAME build
# TODO: add Windows headers installation step
)
endif()
file(INSTALL ${SOURCE_PATH}/LICENSE DESTINATION ${CURRENT_PACKAGES_DIR}/share/${PORT} RENAME copyright)

Просмотреть файл

@ -0,0 +1,9 @@
--head
--overlay-ports=tools/ports
gtest
benchmark
ms-gsl
nlohmann-json
abseil
protobuf
opentelemetry

Просмотреть файл

@ -0,0 +1,9 @@
--head
--overlay-ports=tools/ports
gtest
benchmark
ms-gsl
nlohmann-json
abseil
protobuf
opentelemetry

2
tools/setup-buildtools-mac.sh Normal file → Executable file
Просмотреть файл

@ -24,8 +24,10 @@ sudo chown -R $(whoami) /usr/local/var/homebrew
sudo chown -R $(whoami) /usr/local/etc/bash_completion.d /usr/local/include /usr/local/lib/pkgconfig /usr/local/share/aclocal /usr/local/share/locale /usr/local/share/zsh /usr/local/share/zsh/site-functions /usr/local/var/homebrew/locks
brew install cmake
brew install coreutils
brew install wget
brew install clang-format
brew install google-benchmark
brew tap nlohmann/json
brew install nlohmann-json
brew install abseil

Просмотреть файл

@ -0,0 +1,55 @@
@echo off
REM Install Windows build prerequisites: chocolatey packages (when elevated),
REM Visual Studio detection, vcpkg bootstrap, and vcpkg dependencies.
set "PATH=%ProgramFiles%\CMake\bin;%~dp0;%~dp0\vcpkg;%PATH%"
pushd %~dp0
REM 'net session' succeeds only in an elevated shell.
net session >nul 2>&1
if %errorLevel% == 0 (
echo Running with Administrative privilege...
REM Fail if chocolatey is not installed
where /Q choco
if ERRORLEVEL 1 (
echo This script requires chocolatey. Installation instructions: https://chocolatey.org/docs/installation
REM Fix: use 'exit /b' to return to the calling script; bare 'exit -1'
REM killed the whole cmd session.
exit /b 1
)
REM Install tools needed for building, but only if not installed yet
where /Q vswhere || choco install -y vswhere
where /Q cmake || choco install -y cmake
where /Q git || choco install -y git
) else (
echo Running without Administrative privilege...
)
REM Print current Visual Studio installations detected
where /Q vswhere
REM Fix: 'if ERRORLEVEL 0' is always true (it means errorlevel >= 0);
REM test for success explicitly.
if not ERRORLEVEL 1 (
echo Visual Studio installations detected:
vswhere -property installationPath
)
REM Try to autodetect Visual Studio
call "%~dp0\vcvars.cmd"
if "%TOOLS_VS_NOTFOUND%" == "1" (
REM Cannot detect MSBuild path
REM TODO: no command line tools..
REM TODO: use MSBuild from vswhere?
)
where /Q vcpkg.exe
if ERRORLEVEL 1 (
REM Build our own vcpkg from source
pushd .\vcpkg
call bootstrap-vcpkg.bat
popd
) else (
echo Using existing vcpkg installation...
)
REM Install dependencies
vcpkg install gtest:x64-windows
vcpkg install --head --overlay-ports=%~dp0\ports benchmark:x64-windows
vcpkg install ms-gsl:x64-windows
vcpkg install nlohmann-json:x64-windows
vcpkg install abseil:x64-windows
vcpkg install protobuf:x64-windows
popd
exit /b 0

33
tools/setup-buildtools.sh Normal file → Executable file
Просмотреть файл

@ -1,6 +1,9 @@
#!/bin/sh
#!/bin/bash
if [ -f /bin/yum ]; then
# Switch to workspace root directory first
DIR="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
if [ -f /bin/yum ] ; then
# Prefer yum over apt-get
yum -y install automake
yum -y install autoconf
@ -22,30 +25,12 @@ yum -y install devtoolset-7-valgrind
yum-config-manager --enable rhel-server-rhscl-7-rpms
if [ `gcc --version | grep 7` == "" ]; then
echo "*********************************************************"
echo "*** Please make sure you start the build with gcc-7 ***"
echo "*** > scl enable devtoolset-7 ./build.sh ***"
echo "*********************************************************"
exit 3
fi
if [ `cmake --version | grep 3` == "" ]; then
yum -y remove cmake
wget https://cmake.org/files/v3.6/cmake-3.6.2.tar.gz
tar -zxvf cmake-3.6.2.tar.gz
cd cmake-3.6.2
./bootstrap --prefix=/usr/local
make
make install
cd ..
fi
else
# Use apt-get
export DEBIAN_FRONTEND=noninteractive
apt-get update -y
apt-get install -qq automake
apt-get install -qq bc
apt-get install -qq libtool-bin
apt-get install -qq cmake
apt-get install -qq curl
@ -59,7 +44,13 @@ apt-get install -qq libsqlite3-dev
#apt install libsqlite3-dev
apt-get install -qq wget
apt-get install -qq clang-format
apt-get install -qq libgtest-dev
apt-get install -qq libbenchmark-dev
apt-get install -qq nlohmann-json-dev
fi
# Build and install latest CMake
$DIR/setup-cmake.sh
## Change owner from root to current dir owner
chown -R `stat . -c %u:%g` *

58
tools/setup-cmake.sh Executable file
Просмотреть файл

@ -0,0 +1,58 @@
#!/bin/bash
#
# This script installs the latest CMake on a Linux machine.
#
# Arguments:
#   $1 - minimum required CMake version (default 3.1.0); if the installed
#        cmake already satisfies it, nothing is installed.
#   $2 - version to download and build from source otherwise (default 3.18.4).
#
export PATH=/usr/local/bin:$PATH
# Min required CMake version
export CMAKE_MIN_VERSION=${1:-3.1.0}
# Target version to install if min required is not found
export CMAKE_VERSION=${2:-3.18.4}
UPGRADE_NEEDED=no
# Split $ver ("major.minor.patch") into the globals v1 / v2 / v3.
# (Fix: removed the dead, unbalanced 'pattern=' assignment that was never used.)
function splitVersion {
v1=$(cut -d '.' -f 1 <<< $ver )
v2=$(cut -d '.' -f 2 <<< $ver )
v3=$(cut -d '.' -f 3 <<< $ver )
}
# Compare the installed cmake version with CMAKE_MIN_VERSION and set
# UPGRADE_NEEDED=yes when an upgrade is required.
function checkVersion {
# Current CMake version
currVer=`cmake --version | grep version | cut -d' ' -f 3`
ver=$currVer splitVersion
cv1=$v1
cv2=$v2
cv3=$v3
# Pack major/minor/patch into one integer so versions compare numerically.
cv=`echo "65536*$v1+256*$v2+$v3" | bc`
# New CMake version
ver=$CMAKE_MIN_VERSION splitVersion
nv=`echo "65536*$v1+256*$v2+$v3" | bc`
if [ "$cv" -ge "$nv" ]; then
echo "CMake is already installed: $currVer"
else
UPGRADE_NEEDED=yes
fi
}
checkVersion
if [[ "$UPGRADE_NEEDED" == "no" ]]; then
echo "Skipping CMake installation"
exit 0
fi
# Download cmake to /tmp (the tarball is cached across runs)
pushd /tmp
if [[ ! -f "/tmp/cmake.tar.gz" ]]; then
wget -O /tmp/cmake.tar.gz https://github.com/Kitware/CMake/releases/download/v${CMAKE_VERSION}/cmake-${CMAKE_VERSION}.tar.gz
tar -zxvf /tmp/cmake.tar.gz
fi
# Bootstrap CMake
cd cmake-${CMAKE_VERSION}
./bootstrap --prefix=/usr/local
# Build CMake without CMake and without Ninja (slow)
make
make install
popd

0
tools/setup-devenv.sh Normal file → Executable file
Просмотреть файл

6
tools/setup-protobuf.sh Executable file
Просмотреть файл

@ -0,0 +1,6 @@
#!/bin/bash
# Install the protobuf compiler (protoc) v3.14.0 from the official release
# binaries and register it with update-alternatives.
# Fix: fail fast — previously a failed download fell through to unzip a
# missing/partial archive and the script still returned success.
set -e
pushd /tmp
# -f makes curl return an error on HTTP failures instead of saving the error page.
curl -fOL https://github.com/protocolbuffers/protobuf/releases/download/v3.14.0/protoc-3.14.0-linux-x86_64.zip
unzip -o protoc-3.14.0-linux-x86_64.zip -d /usr/local/
update-alternatives --install /usr/bin/protoc protoc /usr/local/bin/protoc 1 --force
popd

1
tools/vcpkg Submodule

@ -0,0 +1 @@
Subproject commit e9f8cc67a5e5541973e53ac03f88adb45cc1b21b

86
tools/vcvars.cmd Normal file
Просмотреть файл

@ -0,0 +1,86 @@
@echo off
REM Configures the Visual Studio developer command environment for builds
REM by locating and calling the matching VsDevCmd.bat / vcvars32.bat.
REM
REM Make sure to enable the 'Visual C++ ATL' components for all platforms during the setup.
REM
REM This build script auto-detects and configures Visual Studio in the following order:
REM 1. Visual Studio 2017 Enterprise
REM 2. Visual Studio 2017 BuildTools
REM 3. Visual Studio 2019 Enterprise
REM 4. Visual Studio 2019 Community
REM 5. Visual Studio 2019 BuildTools
REM 6. Visual Studio 2015 (vcvars32.bat) as a last resort
REM
REM A specific toolset can be forced with the 1st argument or with the
REM VS_TOOLS_VERSION environment variable (label names: vs2017, vs2019,
REM vs2015, vs2017_buildtools, vs2019_community, ...).
REM If nothing is found, TOOLS_VS_NOTFOUND=1 is set and the script exits 0
REM so callers can decide how to react.
REM
REM 1st parameter - Visual Studio version (jumps straight to that label)
if "%1" neq "" (
goto %1
)
REM Environment override - same label names as the 1st parameter
if "%VS_TOOLS_VERSION%" neq "" (
goto %VS_TOOLS_VERSION%
)
REM vs2017 Enterprise
:vs2017
:vs2017_enterprise
set TOOLS_VS2017_ENTERPRISE="%ProgramFiles(x86)%\Microsoft Visual Studio\2017\Enterprise\Common7\Tools\VsDevCmd.bat"
if exist %TOOLS_VS2017_ENTERPRISE% (
echo Building with vs2017 Enterprise...
call %TOOLS_VS2017_ENTERPRISE%
goto tools_configured
)
REM vs2017 BuildTools
:vs2017_buildtools
set TOOLS_VS2017="%ProgramFiles(x86)%\Microsoft Visual Studio\2017\BuildTools\Common7\Tools\VsDevCmd.bat"
if exist %TOOLS_VS2017% (
echo Building with vs2017 BuildTools...
call %TOOLS_VS2017%
goto tools_configured
)
REM vs2019 Enterprise
:vs2019
:vs2019_enterprise
set TOOLS_VS2019_ENTERPRISE="%ProgramFiles(x86)%\Microsoft Visual Studio\2019\Enterprise\Common7\Tools\VsDevCmd.bat"
if exist %TOOLS_VS2019_ENTERPRISE% (
echo Building with vs2019 Enterprise...
call %TOOLS_VS2019_ENTERPRISE%
goto tools_configured
)
REM vs2019 Community
:vs2019_community
set TOOLS_VS2019_COMMUNITY="%ProgramFiles(x86)%\Microsoft Visual Studio\2019\Community\Common7\Tools\VsDevCmd.bat"
if exist %TOOLS_VS2019_COMMUNITY% (
echo Building with vs2019 Community...
call %TOOLS_VS2019_COMMUNITY%
goto tools_configured
)
REM vs2019 BuildTools
:vs2019_buildtools
set TOOLS_VS2019="%ProgramFiles(x86)%\Microsoft Visual Studio\2019\BuildTools\Common7\Tools\VsDevCmd.bat"
if exist %TOOLS_VS2019% (
echo Building with vs2019 BuildTools...
call %TOOLS_VS2019%
goto tools_configured
)
REM vs2015 - uses the classic vcvars32.bat; also points vcpkg at the
REM v140 toolset so vcpkg builds match the selected compiler.
:vs2015
set TOOLS_VS2015="%ProgramFiles(x86)%\Microsoft Visual Studio 14.0\VC\bin\vcvars32.bat"
if exist %TOOLS_VS2015% (
echo Building with vs2015 BuildTools...
call %TOOLS_VS2015%
set "VCPKG_VISUAL_STUDIO_PATH=%ProgramFiles(x86)%\Microsoft Visual Studio 14.0"
set VCPKG_PLATFORM_TOOLSET=v140
goto tools_configured
)
REM No known Visual Studio installation was found. Flag it for the caller
REM but exit successfully so wrapper scripts keep running.
echo WARNING:*********************************************
echo WARNING: cannot auto-detect Visual Studio version !!!
echo WARNING:*********************************************
set TOOLS_VS_NOTFOUND=1
exit /b 0
:tools_configured