* Add a getApi method, listing all installed RPC method names

* Sketch RecordParams struct

* WIP

* Broken WIP

* Partial macro

* Basic examples working

* Partial file separation

* Move, rename, and fix FOR macro

* Use json get

* Build to_json from RequiredJsonFields too

* Remove unneeded pair specialisation

* Add comments, collide required and optional

* Reformat

* Use new macros everywhere

* Remove unused template

* Rename getApi to listMethods

* Move frontend-specific calltypes to /rpc

* Specify GetTxHist return type

* Pretty-print client responses by default

* Add a GetSchema RPC

* Other tools demand ugly formatting by default

* Mins and maxes for numerics, map of schemas

* Support _FOR_JSON_0

* Fix support for std::optional optional fields

* Test std optionals

* Define schemas for GetCommit

* More definitions for existing RPCs

* Tidy schema generation, including for vectors

* Add proper unit test

* Initial test of schema generation

* Fix failing tests

* Formatting

* Add (currently failing) test of nested structs

* Add misleadingly passing test

* Set correct expected pointers, test currently fails

* Oops - deexpand

* Correctly build pointer path for erroneous array elements

* Demonstrate invalid, not just missing, values

* Skeleton of json_bench

* Fix typo

* WIP

* Compare manual json parsers vs macro-defined

* mumble mumble

* Add valijson, +basic test

* Add benchmark of valijson validation

* Benchmark simple and complex structs

* Additional broken schema test

* Include pointer to parse errors

* Restore old basic translator macro

* Restore simpler macro for translators that don't need schema

* Add auto schema for private logging methods

* Add manual schema + validation for PUBLIC logging RPCs

* Match RPC format

* More RPC format fixes

* Correct scenario test target

* Add documentation entry on API schema

* Initial schema retrieval test

* Correct URLs in generated schema

* Send schema to a flat folder

* Remove unnecessary size_t max restriction

* Report non-matching schema

* Add current schemas

* Tidying

* clang-format

* Remove schema generation e2e test

* fmtlib, remove $id from schema

* Fix pointer paths
This commit is contained in:
Eddy Ashton 2019-06-05 10:36:50 +01:00 committed by GitHub
Parent 93a09d4edb
Commit e567277a42
No key found matching this signature
GPG key ID: 4AEE18F83AFDEB23
23 changed files: 1592 additions and 156 deletions


@ -66,6 +66,9 @@ if(BUILD_TESTS)
add_unit_test(map_test
${CMAKE_CURRENT_SOURCE_DIR}/src/ds/test/map_test.cpp)
add_unit_test(json_schema
${CMAKE_CURRENT_SOURCE_DIR}/src/ds/test/json_schema.cpp)
add_unit_test(kv_test
${CMAKE_CURRENT_SOURCE_DIR}/src/kv/test/kv_test.cpp
${CMAKE_CURRENT_SOURCE_DIR}/src/kv/test/kv_contention.cpp
@ -193,6 +196,7 @@ if(BUILD_TESTS)
## Picobench benchmarks
add_picobench(map_bench src/ds/test/map_bench.cpp)
add_picobench(logger_bench src/ds/test/logger_bench.cpp)
add_picobench(json_bench src/ds/test/json_bench.cpp)
add_picobench(ringbuffer_bench src/ds/test/ringbuffer_bench.cpp)
target_link_libraries(ringbuffer_bench PRIVATE
${CMAKE_THREAD_LIBS_INIT})


@ -2,7 +2,7 @@ Logging (C++)
-------------
Overview
```````````
````````
A C++ transaction engine exposes itself to CCF by implementing:
@ -65,6 +65,37 @@ A handle can either be installed as:
- ``Read``: this handler can be executed on any node of the network.
- ``MayWrite``: the execution of this handler on a specific node depends on the value of the ``"readonly"`` parameter in the JSON-RPC command.
API Schema
..........
These handlers also demonstrate two different ways of defining schemas for RPCs and validating incoming requests against them. The record/get methods operating on public tables have manually defined schemas and use [#valijson]_ for validation, returning an error if the input does not comply with the schema:
.. literalinclude:: ../../src/apps/logging/logging.cpp
:language: cpp
:start-after: SNIPPET_START: valijson_record_public
:end-before: SNIPPET_END: valijson_record_public
:dedent: 6
This provides robust, extensible validation using the full JSON schema spec.
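The core of this approach is a small helper which parses the registered schema and validates incoming params against it with valijson. A minimal sketch of such a helper is shown below; the name ``validate_against_schema`` is illustrative, and the app's own ``validate`` member additionally formats valijson's ``ValidationResults`` into the returned error message.

.. code-block:: cpp

    #include <nlohmann/json.hpp>
    #include <optional>
    #include <string>
    #include <valijson/adapters/nlohmann_json_adapter.hpp>
    #include <valijson/schema.hpp>
    #include <valijson/schema_parser.hpp>
    #include <valijson/validator.hpp>

    // Returns an error description if params does not comply with j_schema,
    // or std::nullopt if validation succeeds
    std::optional<std::string> validate_against_schema(
      const nlohmann::json& params, const nlohmann::json& j_schema)
    {
      valijson::Schema schema;
      valijson::SchemaParser parser;
      valijson::adapters::NlohmannJsonAdapter schema_adapter(j_schema);
      parser.populateSchema(schema_adapter, schema);

      valijson::Validator validator;
      valijson::ValidationResults results;
      valijson::adapters::NlohmannJsonAdapter params_adapter(params);
      if (!validator.validate(schema, params_adapter, &results))
      {
        return "Request does not comply with schema";
      }

      return std::nullopt;
    }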
The methods operating on private tables use an alternative approach, with a macro-generated schema and parser converting compliant requests into a POD C++ object:
.. literalinclude:: ../../src/apps/logging/logging_schema.h
:language: cpp
:start-after: SNIPPET_START: macro_validation_macros
:end-before: SNIPPET_END: macro_validation_macros
:dedent: 2
.. literalinclude:: ../../src/apps/logging/logging.cpp
:language: cpp
:start-after: SNIPPET_START: macro_validation_record
:end-before: SNIPPET_END: macro_validation_record
:dedent: 6
This produces validation error messages with a lower performance overhead and ensures the schema and parsing logic stay in sync, but it is only suitable for simple schemas with required and optional fields of supported types.
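As an illustration of the macro-based approach (the ``CreateWidget`` struct and its fields below are hypothetical, not part of the logging app), declaring a params type, parsing it, and generating its schema looks roughly like this:

.. code-block:: cpp

    #include "ds/json.h"
    #include "ds/json_schema.h"

    #include <cstddef>
    #include <string>

    namespace ccf
    {
      // Hypothetical params type - declared inside a namespace which has
      // stated NAMESPACE_CONTAINS_JSON_TYPES (ccf already has)
      struct CreateWidget
      {
        struct In
        {
          size_t id;
          std::string label = "unnamed"; // optional, with a default value
        };
      };
      DECLARE_REQUIRED_JSON_FIELDS(CreateWidget::In, id);
      DECLARE_OPTIONAL_JSON_FIELDS(CreateWidget::In, label);
    }

    // In a handler, a single get<>() both validates and parses - a missing or
    // malformed required field throws JsonParseError, carrying a JSON pointer
    // to the offending element:
    //   const auto in = params.get<ccf::CreateWidget::In>();
    //
    // The same declarations drive schema generation, e.g. for registration:
    //   const auto schema =
    //     ccf::build_schema<ccf::CreateWidget::In>("createWidget/params");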
Both approaches register their RPCs' params and result schemas, allowing these to be retrieved at runtime with calls to the getSchema RPC.
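For example, a client can fetch the registered schemas for ``LOG_get`` by calling ``getSchema`` with a ``method`` param, as defined by ``GetSchema::In``. A sketch of the params, and of the shape of the returned ``GetSchema::Out`` (abridged; the comments reflect the ``LoggingGet`` declarations above), is:

.. code-block:: cpp

    #include <iostream>
    #include <nlohmann/json.hpp>

    int main()
    {
      // Params for a getSchema call (GetSchema::In has a single "method" field)
      const nlohmann::json params = {{"method", "LOG_get"}};
      std::cout << params.dump() << std::endl; // {"method":"LOG_get"}

      // A successful result follows GetSchema::Out, e.g. (abridged):
      //   {"params_schema": {"type": "object", "required": ["id"], ...},
      //    "result_schema": {"type": "object", "required": ["msg"], ...}}
      return 0;
    }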
Build
`````
@ -88,3 +119,7 @@ This produces the enclave library ``libloggingenc.so.signed`` which can be loade
.. code-block:: bash
./cchost --enclave-file libloggingenc.so.signed [args]
.. rubric:: Footnotes
.. [#valijson] `Valijson is a header-only JSON Schema Validation library for C++11 <https://github.com/tristanpenman/valijson>`_.


@ -1,21 +1,66 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the Apache 2.0 License.
#include "enclave/appinterface.h"
#include "logging_schema.h"
#include "node/entities.h"
#include "node/rpc/nodeinterface.h"
#include "node/rpc/userfrontend.h"
#define FMT_HEADER_ONLY
#include <fmt/format.h>
#include <valijson/adapters/nlohmann_json_adapter.hpp>
#include <valijson/schema.hpp>
#include <valijson/schema_parser.hpp>
#include <valijson/validator.hpp>
using namespace std;
using namespace nlohmann;
using namespace ccf;
namespace fmt
{
template <>
struct formatter<valijson::ValidationResults::Error>
{
template <typename ParseContext>
constexpr auto parse(ParseContext& ctx)
{
return ctx.begin();
}
template <typename FormatContext>
auto format(const valijson::ValidationResults::Error& e, FormatContext& ctx)
{
return format_to(
ctx.begin(), "[{}] {}", fmt::join(e.context, ""), e.description);
}
};
template <>
struct formatter<valijson::ValidationResults>
{
template <typename ParseContext>
constexpr auto parse(ParseContext& ctx)
{
return ctx.begin();
}
template <typename FormatContext>
auto format(const valijson::ValidationResults& vr, FormatContext& ctx)
{
return format_to(ctx.begin(), "{}", fmt::join(vr, "\n\t"));
}
};
}
namespace ccfapp
{
struct Procs
{
static constexpr auto LOG_RECORD = "LOG_record";
static constexpr auto LOG_RECORD_PUBLIC = "LOG_record_pub";
static constexpr auto LOG_GET = "LOG_get";
static constexpr auto LOG_RECORD_PUBLIC = "LOG_record_pub";
static constexpr auto LOG_GET_PUBLIC = "LOG_get_pub";
};
@ -28,28 +73,62 @@ namespace ccfapp
Table& records;
Table& public_records;
const nlohmann::json record_public_params_schema;
const nlohmann::json get_public_params_schema;
const nlohmann::json get_public_result_schema;
std::optional<std::string> validate(
const nlohmann::json& params, const nlohmann::json& j_schema)
{
valijson::Schema schema;
valijson::SchemaParser parser;
valijson::adapters::NlohmannJsonAdapter schema_adapter(j_schema);
parser.populateSchema(schema_adapter, schema);
valijson::Validator validator;
valijson::ValidationResults results;
valijson::adapters::NlohmannJsonAdapter params_adapter(params);
if (!validator.validate(schema, params_adapter, &results))
{
return fmt::format("Error during validation:\n\t{}", results);
}
return std::nullopt;
}
public:
Logger(NetworkTables& nwt, AbstractNotifier& notifier) :
UserRpcFrontend(*nwt.tables),
records(tables.create<Table>(ccf::Tables::APP)),
public_records(tables.create<Table>(
ccf::Tables::APP_PUBLIC, kv::SecurityDomain::PUBLIC))
ccf::Tables::APP_PUBLIC, kv::SecurityDomain::PUBLIC)),
record_public_params_schema(nlohmann::json::parse(j_record_public)),
get_public_params_schema(nlohmann::json::parse(j_get_public_in)),
get_public_result_schema(nlohmann::json::parse(j_get_public_out))
{
// SNIPPET_START: record
// SNIPPET_START: macro_validation_record
register_auto_schema<LoggingRecord::In, void>(Procs::LOG_RECORD);
auto record = [this](Store::Tx& tx, const nlohmann::json& params) {
const auto in = params.get<LoggingRecord::In>();
// SNIPPET_END: macro_validation_record
auto view = tx.get_view(records);
view->put(params["id"], params["msg"]);
view->put(in.id, in.msg);
return jsonrpc::success();
};
// SNIPPET_END: record
// SNIPPET_START: get
register_auto_schema<LoggingGet>(Procs::LOG_GET);
auto get = [this](Store::Tx& tx, const nlohmann::json& params) {
const auto in = params.get<LoggingGet::In>();
auto view = tx.get_view(records);
auto r = view->get(params["id"]);
auto r = view->get(in.id);
if (r.has_value())
return jsonrpc::success(r.value());
return jsonrpc::success(LoggingGet::Out{r.value()});
return jsonrpc::error(
jsonrpc::ErrorCodes::INVALID_PARAMS, "No such record");
@ -57,7 +136,22 @@ namespace ccfapp
// SNIPPET_END: get
// SNIPPET_START: record_public
// SNIPPET_START: valijson_record_public
register_schema(
Procs::LOG_RECORD_PUBLIC,
record_public_params_schema,
nlohmann::json::object());
auto record_public = [this](Store::Tx& tx, const nlohmann::json& params) {
const auto validation_error =
validate(params, record_public_params_schema);
if (validation_error.has_value())
{
return jsonrpc::error(
jsonrpc::ErrorCodes::PARSE_ERROR, *validation_error);
}
// SNIPPET_END: valijson_record_public
auto view = tx.get_view(public_records);
view->put(params["id"], params["msg"]);
return jsonrpc::success();
@ -65,12 +159,29 @@ namespace ccfapp
// SNIPPET_END: record_public
// SNIPPET_START: get_public
register_schema(
Procs::LOG_GET_PUBLIC,
get_public_params_schema,
get_public_result_schema);
auto get_public = [this](Store::Tx& tx, const nlohmann::json& params) {
const auto validation_error =
validate(params, get_public_params_schema);
if (validation_error.has_value())
{
return jsonrpc::error(
jsonrpc::ErrorCodes::PARSE_ERROR, *validation_error);
}
auto view = tx.get_view(public_records);
auto r = view->get(params["id"]);
if (r.has_value())
return jsonrpc::success(r.value());
{
auto result = nlohmann::json::object();
result["msg"] = r.value();
return jsonrpc::success(result);
}
return jsonrpc::error(
jsonrpc::ErrorCodes::INVALID_PARAMS, "No such record");


@ -0,0 +1,92 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the Apache 2.0 License.
#pragma once
#include "ds/json.h"
namespace ccf
{
// Private record/get
// Explicit target structs, macro-generated parsers + schema
// SNIPPET_START: macro_validation_macros
struct LoggingRecord
{
struct In
{
size_t id;
std::string msg;
};
};
struct LoggingGet
{
struct In
{
size_t id;
};
struct Out
{
std::string msg;
};
};
DECLARE_REQUIRED_JSON_FIELDS(LoggingRecord::In, id, msg);
DECLARE_REQUIRED_JSON_FIELDS(LoggingGet::In, id);
DECLARE_REQUIRED_JSON_FIELDS(LoggingGet::Out, msg);
// SNIPPET_END: macro_validation_macros
// Public record/get
// Manual schemas, verified then parsed in handler
static const std::string j_record_public = R"!!!(
{
"$schema": "http://json-schema.org/draft-07/schema#",
"properties": {
"id": {
"type": "number"
},
"msg": {
"type": "string"
}
},
"required": [
"id",
"msg"
],
"title": "LOG_record_pub/params",
"type": "object"
}
)!!!";
static const std::string j_get_public_in = R"!!!(
{
"$schema": "http://json-schema.org/draft-07/schema#",
"properties": {
"id": {
"type": "number"
}
},
"required": [
"id"
],
"title": "LOG_get_pub/params",
"type": "object"
}
)!!!";
static const std::string j_get_public_out = R"!!!(
{
"$schema": "http://json-schema.org/draft-07/schema#",
"properties": {
"msg": {
"type": "string"
}
},
"required": [
"msg"
],
"title": "LOG_get_pub/result",
"type": "object"
}
)!!!";
}


@ -1,16 +1,154 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the Apache 2.0 License.
#pragma once
#define FMT_HEADER_ONLY
#include <fmt/format.h>
#include <nlohmann/json.hpp>
#include <sstream>
template <typename T>
void assign_j(T& o, const nlohmann::json& j)
{
T t = j;
o = std::move(t);
o = std::move(j.get<T>());
}
#define __JSON_N( \
/** Represents a field within a JSON object. Tuples of these can be used in
* schema generation.
*/
template <typename T>
struct JsonField
{
using Target = T;
char const* name;
};
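/** Thrown by the macro-generated read_fields functions (and the std::vector
 * from_json below) when an element is missing or malformed. pointer_elements
 * is appended to as the exception propagates out of nested objects and
 * arrays, so pointer() reconstructs a JSON pointer to the offending element,
 * e.g. "#/v/xs/3/a".
 */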
class JsonParseError : public std::invalid_argument
{
public:
std::vector<std::string> pointer_elements = {};
using std::invalid_argument::invalid_argument;
std::string pointer() const
{
return fmt::format(
"#/{}",
fmt::join(pointer_elements.crbegin(), pointer_elements.crend(), "/"));
}
};
namespace std
{
template <typename T>
inline void to_json(nlohmann::json& j, const std::optional<T>& t)
{
if (t.has_value())
{
j = t.value();
}
}
template <typename T>
inline void from_json(const nlohmann::json& j, std::optional<T>& t)
{
if (!j.is_null())
{
t = j.get<T>();
}
}
template <typename T>
inline void to_json(nlohmann::json& j, const std::vector<T>& t)
{
j = nlohmann::json::array();
for (const auto& e : t)
{
j.push_back(e);
}
}
template <typename T>
inline void from_json(const nlohmann::json& j, std::vector<T>& t)
{
if (!j.is_array())
{
throw JsonParseError("Expected array, found: " + j.dump());
}
for (auto i = 0u; i < j.size(); ++i)
{
try
{
t.push_back(j.at(i).template get<T>());
}
catch (JsonParseError& jpe)
{
jpe.pointer_elements.push_back(std::to_string(i));
throw;
}
}
}
}
/** Template specialisation must happen in the correct namespace, so
NAMESPACE_CONTAINS_JSON_TYPES must be stated within a namespace to use
DECLARE_REQUIRED_JSON_FIELDS.
*/
#define NAMESPACE_CONTAINS_JSON_TYPES \
template <typename T> \
struct RequiredJsonFields : std::false_type \
{}; \
\
template <typename T> \
struct OptionalJsonFields : std::false_type \
{}; \
\
template <typename T, bool Required> \
void write_fields(nlohmann::json& j, const T& t); \
\
template <typename T, bool Required> \
void read_fields(const nlohmann::json& j, T& t); \
\
template < \
typename T, \
typename = std::enable_if_t<RequiredJsonFields<T>::value>> \
inline void to_json(nlohmann::json& j, const T& t) \
{ \
j = nlohmann::json::object(); \
write_fields<T, true>(j, t); \
if constexpr (OptionalJsonFields<T>::value) \
{ \
write_fields<T, false>(j, t); \
} \
} \
\
template < \
typename T, \
typename = std::enable_if_t<RequiredJsonFields<T>::value>> \
inline void from_json(const nlohmann::json& j, T& t) \
{ \
if (!j.is_object()) \
{ \
throw JsonParseError("Expected object, found: " + j.dump()); \
} \
read_fields<T, true>(j, t); \
if constexpr (OptionalJsonFields<T>::value) \
{ \
read_fields<T, false>(j, t); \
} \
}
/** Global namespace and ccf namespace are initialised here
*/
NAMESPACE_CONTAINS_JSON_TYPES;
namespace ccf
{
NAMESPACE_CONTAINS_JSON_TYPES;
}
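// Any other namespace which wants to use the DECLARE_*_JSON_FIELDS macros for
// its own types must state NAMESPACE_CONTAINS_JSON_TYPES once itself, e.g.
// (illustrative - myapp and Widget are not part of this codebase):
//
//   namespace myapp
//   {
//     NAMESPACE_CONTAINS_JSON_TYPES;
//
//     struct Widget
//     {
//       size_t id;
//     };
//     DECLARE_REQUIRED_JSON_FIELDS(Widget, id);
//   }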
#define __FOR_JSON_NN( \
_0, \
_1, \
_2, \
_3, \
@ -33,10 +171,10 @@ void assign_j(T& o, const nlohmann::json& j)
_20, \
N, \
...) \
_JSON_##N
#define _JSON_N(args...) \
__JSON_N( \
args, \
_FOR_JSON_##N
#define _FOR_JSON_WITH_0(...) \
__FOR_JSON_NN( \
__VA_ARGS__, \
20, \
19, \
18, \
@ -56,33 +194,264 @@ void assign_j(T& o, const nlohmann::json& j)
4, \
3, \
2, \
1)
1, \
0)
#define _FOR_JSON_NN(...) _FOR_JSON_WITH_0(DUMMY, ##__VA_ARGS__)
#define TO_JSON_1(a) j[#a] = c.a;
#define FROM_JSON_1(a) assign_j(c.a, j[#a]);
#define _FOR_JSON_0(FUNC, TYPE)
#define _FOR_JSON_1(FUNC, TYPE, FIELD) FUNC##_FOR_JSON_FINAL(TYPE, FIELD)
#define _FOR_JSON_2(FUNC, TYPE, FIELD, ...) \
_FOR_JSON_NEXT(FUNC, TYPE, FIELD) _FOR_JSON_1(FUNC, TYPE, ##__VA_ARGS__)
#define _FOR_JSON_3(FUNC, TYPE, FIELD, ...) \
_FOR_JSON_NEXT(FUNC, TYPE, FIELD) _FOR_JSON_2(FUNC, TYPE, ##__VA_ARGS__)
#define _FOR_JSON_4(FUNC, TYPE, FIELD, ...) \
_FOR_JSON_NEXT(FUNC, TYPE, FIELD) _FOR_JSON_3(FUNC, TYPE, ##__VA_ARGS__)
#define _FOR_JSON_5(FUNC, TYPE, FIELD, ...) \
_FOR_JSON_NEXT(FUNC, TYPE, FIELD) _FOR_JSON_4(FUNC, TYPE, ##__VA_ARGS__)
#define _FOR_JSON_6(FUNC, TYPE, FIELD, ...) \
_FOR_JSON_NEXT(FUNC, TYPE, FIELD) _FOR_JSON_5(FUNC, TYPE, ##__VA_ARGS__)
#define _FOR_JSON_7(FUNC, TYPE, FIELD, ...) \
_FOR_JSON_NEXT(FUNC, TYPE, FIELD) _FOR_JSON_6(FUNC, TYPE, ##__VA_ARGS__)
#define _FOR_JSON_8(FUNC, TYPE, FIELD, ...) \
_FOR_JSON_NEXT(FUNC, TYPE, FIELD) _FOR_JSON_7(FUNC, TYPE, ##__VA_ARGS__)
#define _FOR_JSON_9(FUNC, TYPE, FIELD, ...) \
_FOR_JSON_NEXT(FUNC, TYPE, FIELD) _FOR_JSON_8(FUNC, TYPE, ##__VA_ARGS__)
#define _FOR_JSON_10(FUNC, TYPE, FIELD, ...) \
_FOR_JSON_NEXT(FUNC, TYPE, FIELD) _FOR_JSON_9(FUNC, TYPE, ##__VA_ARGS__)
#define _FOR_JSON_11(FUNC, TYPE, FIELD, ...) \
_FOR_JSON_NEXT(FUNC, TYPE, FIELD) _FOR_JSON_10(FUNC, TYPE, ##__VA_ARGS__)
#define _FOR_JSON_12(FUNC, TYPE, FIELD, ...) \
_FOR_JSON_NEXT(FUNC, TYPE, FIELD) _FOR_JSON_11(FUNC, TYPE, ##__VA_ARGS__)
#define _FOR_JSON_13(FUNC, TYPE, FIELD, ...) \
_FOR_JSON_NEXT(FUNC, TYPE, FIELD) _FOR_JSON_12(FUNC, TYPE, ##__VA_ARGS__)
#define _FOR_JSON_14(FUNC, TYPE, FIELD, ...) \
_FOR_JSON_NEXT(FUNC, TYPE, FIELD) _FOR_JSON_13(FUNC, TYPE, ##__VA_ARGS__)
#define _FOR_JSON_15(FUNC, TYPE, FIELD, ...) \
_FOR_JSON_NEXT(FUNC, TYPE, FIELD) _FOR_JSON_14(FUNC, TYPE, ##__VA_ARGS__)
#define _FOR_JSON_16(FUNC, TYPE, FIELD, ...) \
_FOR_JSON_NEXT(FUNC, TYPE, FIELD) _FOR_JSON_15(FUNC, TYPE, ##__VA_ARGS__)
#define _FOR_JSON_17(FUNC, TYPE, FIELD, ...) \
_FOR_JSON_NEXT(FUNC, TYPE, FIELD) _FOR_JSON_16(FUNC, TYPE, ##__VA_ARGS__)
#define _FOR_JSON_18(FUNC, TYPE, FIELD, ...) \
_FOR_JSON_NEXT(FUNC, TYPE, FIELD) _FOR_JSON_17(FUNC, TYPE, ##__VA_ARGS__)
#define _FOR_JSON_19(FUNC, TYPE, FIELD, ...) \
_FOR_JSON_NEXT(FUNC, TYPE, FIELD) _FOR_JSON_18(FUNC, TYPE, ##__VA_ARGS__)
#define _FOR_JSON_20(FUNC, TYPE, FIELD, ...) \
_FOR_JSON_NEXT(FUNC, TYPE, FIELD) _FOR_JSON_19(FUNC, TYPE, ##__VA_ARGS__)
#define _JSON_1(dir, a) dir##_JSON_1(a)
#define _JSON_2(dir, a, prev...) _JSON_1(dir, a) _JSON_1(dir, prev)
#define _JSON_3(dir, a, prev...) _JSON_1(dir, a) _JSON_2(dir, prev)
#define _JSON_4(dir, a, prev...) _JSON_1(dir, a) _JSON_3(dir, prev)
#define _JSON_5(dir, a, prev...) _JSON_1(dir, a) _JSON_4(dir, prev)
#define _JSON_6(dir, a, prev...) _JSON_1(dir, a) _JSON_5(dir, prev)
#define _JSON_7(dir, a, prev...) _JSON_1(dir, a) _JSON_6(dir, prev)
#define _JSON_8(dir, a, prev...) _JSON_1(dir, a) _JSON_7(dir, prev)
#define _JSON_9(dir, a, prev...) _JSON_1(dir, a) _JSON_8(dir, prev)
#define _JSON_10(dir, a, prev...) _JSON_1(dir, a) _JSON_9(dir, prev)
#define _JSON_11(dir, a, prev...) _JSON_1(dir, a) _JSON_10(dir, prev)
#define _JSON_12(dir, a, prev...) _JSON_1(dir, a) _JSON_11(dir, prev)
#define _JSON_13(dir, a, prev...) _JSON_1(dir, a) _JSON_12(dir, prev)
#define _JSON_14(dir, a, prev...) _JSON_1(dir, a) _JSON_13(dir, prev)
#define _JSON_15(dir, a, prev...) _JSON_1(dir, a) _JSON_14(dir, prev)
#define _JSON_16(dir, a, prev...) _JSON_1(dir, a) _JSON_15(dir, prev)
#define _JSON_17(dir, a, prev...) _JSON_1(dir, a) _JSON_16(dir, prev)
#define _JSON_18(dir, a, prev...) _JSON_1(dir, a) _JSON_17(dir, prev)
#define _JSON_19(dir, a, prev...) _JSON_1(dir, a) _JSON_18(dir, prev)
#define _JSON_20(dir, a, prev...) _JSON_1(dir, a) _JSON_19(dir, prev)
#define WRITE_REQUIRED_FOR_JSON_NEXT(TYPE, FIELD) \
{ \
j[#FIELD] = t.FIELD; \
}
#define WRITE_REQUIRED_FOR_JSON_FINAL(TYPE, FIELD) \
WRITE_REQUIRED_FOR_JSON_NEXT(TYPE, FIELD)
/** Defines from and to json functions for nlohmann::json.
#define WRITE_OPTIONAL_FOR_JSON_NEXT(TYPE, FIELD) \
{ \
if (t.FIELD != t_default.FIELD) \
{ \
j[#FIELD] = t.FIELD; \
} \
}
#define WRITE_OPTIONAL_FOR_JSON_FINAL(TYPE, FIELD) \
WRITE_OPTIONAL_FOR_JSON_NEXT(TYPE, FIELD)
#define READ_REQUIRED_FOR_JSON_NEXT(TYPE, FIELD) \
{ \
const auto it = j.find(#FIELD); \
if (it == j.end()) \
{ \
throw JsonParseError( \
"Missing required field '" #FIELD "' in object: " + j.dump()); \
} \
try \
{ \
t.FIELD = it->get<decltype(TYPE::FIELD)>(); \
} \
catch (JsonParseError & jpe) \
{ \
jpe.pointer_elements.push_back(#FIELD); \
throw; \
} \
}
#define READ_REQUIRED_FOR_JSON_FINAL(TYPE, FIELD) \
READ_REQUIRED_FOR_JSON_NEXT(TYPE, FIELD)
#define READ_OPTIONAL_FOR_JSON_NEXT(TYPE, FIELD) \
{ \
const auto it = j.find(#FIELD); \
if (it != j.end()) \
{ \
t.FIELD = it->get<decltype(TYPE::FIELD)>(); \
} \
}
#define READ_OPTIONAL_FOR_JSON_FINAL(TYPE, FIELD) \
READ_OPTIONAL_FOR_JSON_NEXT(TYPE, FIELD)
#define WRITE_BASIC_FOR_JSON_NEXT(TYPE, FIELD) j[#FIELD] = t.FIELD;
#define WRITE_BASIC_FOR_JSON_FINAL(TYPE, FIELD) \
WRITE_BASIC_FOR_JSON_NEXT(TYPE, FIELD)
#define READ_BASIC_FOR_JSON_NEXT(TYPE, FIELD) \
t.FIELD = j[#FIELD].get<decltype(TYPE::FIELD)>();
#define READ_BASIC_FOR_JSON_FINAL(TYPE, FIELD) \
READ_BASIC_FOR_JSON_NEXT(TYPE, FIELD)
#define JSON_FIELD_FOR_JSON_NEXT(TYPE, FIELD) \
JsonField<decltype(TYPE::FIELD)>{#FIELD},
#define JSON_FIELD_FOR_JSON_FINAL(TYPE, FIELD) \
JsonField<decltype(TYPE::FIELD)> \
{ \
# FIELD \
}
#define TO_JSON_FOR_JSON_NEXT(TYPE, FIELD) j[#FIELD] = c.FIELD;
#define TO_JSON_FOR_JSON_FINAL(TYPE, FIELD) TO_JSON_FOR_JSON_NEXT(TYPE, FIELD)
#define FROM_JSON_FOR_JSON_NEXT(TYPE, FIELD) \
c.FIELD = j[#FIELD].get<decltype(TYPE::FIELD)>();
#define FROM_JSON_FOR_JSON_FINAL(TYPE, FIELD) \
FROM_JSON_FOR_JSON_NEXT(TYPE, FIELD)
#define _FOR_JSON_NEXT(FUNC, TYPE, FIELD) FUNC##_FOR_JSON_NEXT(TYPE, FIELD)
#define _FOR_JSON_FINAL(FUNC, TYPE, FIELD) FUNC##_FOR_JSON_FINAL(TYPE, FIELD)
/** Defines from and to json functions for nlohmann::json with error messages on
* missing elements. Can then use OPTIONAL variant to add non-required fields.
* Only the given class members are considered. Example:
*
* struct X
* {
* int a,b;
* };
* DECLARE_REQUIRED_JSON_FIELDS(X, a, b)
*/
#define DECLARE_REQUIRED_JSON_FIELDS(TYPE, ...) \
template <> \
struct RequiredJsonFields<TYPE> : std::true_type \
{ \
static constexpr auto required_fields = std::make_tuple( \
_FOR_JSON_NN(__VA_ARGS__)(JSON_FIELD, TYPE, ##__VA_ARGS__)); \
}; \
template <> \
inline void write_fields<TYPE, true>(nlohmann::json & j, const TYPE& t) \
{ \
_FOR_JSON_NN(__VA_ARGS__)(WRITE_REQUIRED, TYPE, ##__VA_ARGS__) \
} \
template <> \
inline void read_fields<TYPE, true>(const nlohmann::json& j, TYPE& t) \
{ \
_FOR_JSON_NN(__VA_ARGS__)(READ_REQUIRED, TYPE, ##__VA_ARGS__) \
}
/** Defines from and to json functions for nlohmann::json with respect to a base
* class. Example:
*
* struct X
* {
* int a,b;
* };
* DECLARE_REQUIRED_JSON_FIELDS(X, a, b)
*
* struct Y : public X
* {
* string c;
* };
* DECLARE_REQUIRED_JSON_FIELDS_WITH_BASE(Y, X, c)
*
* This is equivalent to:
* DECLARE_REQUIRED_JSON_FIELDS(Y, a, b, c)
*/
#define DECLARE_REQUIRED_JSON_FIELDS_WITH_BASE(TYPE, BASE, ...) \
template <> \
struct RequiredJsonFields<TYPE> : std::true_type \
{ \
static constexpr auto required_fields = std::tuple_cat( \
RequiredJsonFields<BASE>::required_fields, \
std::make_tuple( \
_FOR_JSON_NN(__VA_ARGS__)(JSON_FIELD, TYPE, ##__VA_ARGS__))); \
}; \
template <> \
inline void write_fields<TYPE, true>(nlohmann::json & j, const TYPE& t) \
{ \
write_fields<BASE, true>(j, t); \
_FOR_JSON_NN(__VA_ARGS__)(WRITE_REQUIRED, TYPE, ##__VA_ARGS__) \
} \
template <> \
inline void read_fields<TYPE, true>(const nlohmann::json& j, TYPE& t) \
{ \
read_fields<BASE, true>(j, t); \
_FOR_JSON_NN(__VA_ARGS__)(READ_REQUIRED, TYPE, ##__VA_ARGS__) \
}
/** Extends existing from and to json functions for nlohmann::json.
* DECLARE_REQUIRED must already have been called for this type.
* When converting from json, missing optional fields will not cause an error
* and the field will be left with its default value.
* When converting to json, the field will only be written if its value differs
* from the default.
*
* struct X
* {
* int a,b,c,d;
* };
* DECLARE_REQUIRED_JSON_FIELDS(X, a, b)
* DECLARE_OPTIONAL_JSON_FIELDS(X, a, b, c, d)
*/
#define DECLARE_OPTIONAL_JSON_FIELDS(TYPE, ...) \
template <> \
struct OptionalJsonFields<TYPE> : std::true_type \
{ \
static constexpr auto optional_fields = std::make_tuple( \
_FOR_JSON_NN(__VA_ARGS__)(JSON_FIELD, TYPE, ##__VA_ARGS__)); \
}; \
template <> \
inline void write_fields<TYPE, false>(nlohmann::json & j, const TYPE& t) \
{ \
const TYPE t_default{}; \
{ \
_FOR_JSON_NN(__VA_ARGS__)(WRITE_OPTIONAL, TYPE, ##__VA_ARGS__) \
} \
} \
template <> \
inline void read_fields<TYPE, false>(const nlohmann::json& j, TYPE& t) \
{ \
{ \
_FOR_JSON_NN(__VA_ARGS__)(READ_OPTIONAL, TYPE, ##__VA_ARGS__) \
} \
}
/** Extends existing from and to json functions for nlohmann::json with respect
* to a base class.
*/
#define DECLARE_OPTIONAL_JSON_FIELDS_WITH_BASE(TYPE, BASE, ...) \
template <> \
struct OptionalJsonFields<TYPE> : std::true_type \
{ \
static constexpr auto optional_fields = std::tuple_cat( \
OptionalJsonFields<BASE>::optional_fields, \
std::make_tuple( \
_FOR_JSON_NN(__VA_ARGS__)(JSON_FIELD, TYPE, ##__VA_ARGS__))); \
}; \
template <> \
inline void write_fields<TYPE, false>(nlohmann::json & j, const TYPE& t) \
{ \
const TYPE t_default{}; \
write_fields<BASE, false>(j, t); \
{ \
_FOR_JSON_NN(__VA_ARGS__)(WRITE_OPTIONAL, TYPE, ##__VA_ARGS__) \
} \
} \
template <> \
inline void read_fields<TYPE, false>(const nlohmann::json& j, TYPE& t) \
{ \
read_fields<BASE, false>(j, t); \
{ \
_FOR_JSON_NN(__VA_ARGS__)(READ_OPTIONAL, TYPE, ##__VA_ARGS__) \
} \
}
/** Defines simple from and to json functions for nlohmann::json.
* Every class that is to be read from Lua needs to have these.
* Only the given class members are considered. Example:
*
@ -91,20 +460,19 @@ void assign_j(T& o, const nlohmann::json& j)
* int a,b;
* };
* ADD_JSON_TRANSLATORS(X, a, b)
*
*/
#define ADD_JSON_TRANSLATORS(C, attr...) \
inline void from_json(const nlohmann::json& j, C& c) \
#define ADD_JSON_TRANSLATORS(TYPE, ...) \
inline void from_json(const nlohmann::json& j, TYPE& t) \
{ \
_JSON_N(attr)(FROM, attr) \
_FOR_JSON_NN(__VA_ARGS__)(READ_BASIC, TYPE, ##__VA_ARGS__) \
} \
inline void to_json(nlohmann::json& j, const C& c) \
inline void to_json(nlohmann::json& j, const TYPE& t) \
{ \
_JSON_N(attr)(TO, attr) \
_FOR_JSON_NN(__VA_ARGS__)(WRITE_BASIC, TYPE, ##__VA_ARGS__) \
}
/** Defines from and to json functions for nlohmann::json with respect to a base
* class. Example:
/** Defines simple from and to json functions for nlohmann::json with respect to
* a base class. Example:
*
* struct X
* {
@ -121,29 +489,14 @@ void assign_j(T& o, const nlohmann::json& j)
* This is equivalent to:
* ADD_JSON_TRANSLATORS(Y, a, b, c)
*/
#define ADD_JSON_TRANSLATORS_WITH_BASE(C, B, attr...) \
inline void from_json(const nlohmann::json& j, C& c) \
#define ADD_JSON_TRANSLATORS_WITH_BASE(TYPE, B, ...) \
inline void from_json(const nlohmann::json& j, TYPE& t) \
{ \
from_json(j, static_cast<B&>(c)); \
_JSON_N(attr)(FROM, attr) \
from_json(j, static_cast<B&>(t)); \
_FOR_JSON_NN(__VA_ARGS__)(READ_BASIC, TYPE, ##__VA_ARGS__) \
} \
inline void to_json(nlohmann::json& j, const C& c) \
inline void to_json(nlohmann::json& j, const TYPE& t) \
{ \
to_json(j, static_cast<const B&>(c)); \
_JSON_N(attr)(TO, attr) \
to_json(j, static_cast<const B&>(t)); \
_FOR_JSON_NN(__VA_ARGS__)(WRITE_BASIC, TYPE, ##__VA_ARGS__) \
}
template <typename K, typename V>
void to_json(nlohmann::json& j, const std::pair<K, V>& p)
{
j = nlohmann::json::array({p.first, p.second});
}
template <typename K, typename V>
void from_json(const nlohmann::json& j, std::pair<K, V>& p)
{
assert(j.is_array() && j.size() == 2);
p.first = j.at(0);
p.second = j.at(1);
}

137
src/ds/json_schema.h Normal file

@ -0,0 +1,137 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the Apache 2.0 License.
#pragma once
#include "json.h"
namespace ccf
{
namespace
{
template <typename T, template <typename...> class U>
struct is_specialization : std::false_type
{};
template <template <typename...> class T, typename... Args>
struct is_specialization<T<Args...>, T> : std::true_type
{};
template <typename T>
struct dependent_false : public std::false_type
{};
};
template <typename T>
inline nlohmann::json schema_properties_element_numeric()
{
nlohmann::json element;
element["type"] = "number";
element["minimum"] = std::numeric_limits<T>::min();
element["maximum"] = std::numeric_limits<T>::max();
return element;
}
template <typename T>
nlohmann::json schema_properties_element();
template <
typename T,
typename = std::enable_if_t<RequiredJsonFields<T>::value>>
inline void fill_schema(nlohmann::json& schema)
{
schema["type"] = "object";
nlohmann::json required = nlohmann::json::array();
nlohmann::json properties;
// For all required fields, add the name of the field to required and the
// schema for the field to properties
std::apply(
[&required, &properties](const auto&... field) {
((required.push_back(field.name),
properties[field.name] = schema_properties_element<
typename std::decay_t<decltype(field)>::Target>()),
...);
},
RequiredJsonFields<T>::required_fields);
// Add all optional fields to properties
if constexpr (OptionalJsonFields<T>::value)
{
std::apply(
[&properties](const auto&... field) {
((properties[field.name] = schema_properties_element<
typename std::decay_t<decltype(field)>::Target>()),
...);
},
OptionalJsonFields<T>::optional_fields);
}
schema["required"] = required;
schema["properties"] = properties;
}
template <typename T>
inline nlohmann::json schema_properties_element()
{
if constexpr (is_specialization<T, std::optional>::value)
{
return schema_properties_element<typename T::value_type>();
}
else if constexpr (is_specialization<T, std::vector>::value)
{
nlohmann::json element;
element["type"] = "array";
element["items"] = schema_properties_element<typename T::value_type>();
return element;
}
else if constexpr (std::is_same<T, std::string>::value)
{
nlohmann::json element;
element["type"] = "string";
return element;
}
else if constexpr (std::is_same<T, bool>::value)
{
nlohmann::json element;
element["type"] = "boolean";
return element;
}
else if constexpr (std::is_same<T, nlohmann::json>::value)
{
// Any field that contains more json is completely unconstrained
return nlohmann::json::object();
}
else if constexpr (std::is_integral<T>::value)
{
return schema_properties_element_numeric<T>();
}
else if constexpr (RequiredJsonFields<T>::value)
{
auto schema = nlohmann::json::object();
fill_schema<T>(schema);
return schema;
}
else
{
static_assert(
dependent_false<T>::value,
"Unsupported type - can't create schema element");
return nullptr;
}
}
template <
typename T,
typename = std::enable_if_t<RequiredJsonFields<T>::value>>
inline nlohmann::json build_schema(const std::string& title)
{
nlohmann::json schema;
schema["$schema"] = "http://json-schema.org/draft-07/schema#";
schema["title"] = title;
fill_schema<T>(schema);
return schema;
}
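// Example (illustrative): given
//   struct Foo { size_t n; std::string s; };
//   DECLARE_REQUIRED_JSON_FIELDS(Foo, n, s);
// build_schema<Foo>("Foo") produces roughly (numeric bounds are those of
// size_t on the target platform):
//   {
//     "$schema": "http://json-schema.org/draft-07/schema#",
//     "title": "Foo",
//     "type": "object",
//     "required": ["n", "s"],
//     "properties": {
//       "n": {"type": "number", "minimum": 0, "maximum": 18446744073709551615},
//       "s": {"type": "string"}
//     }
//   }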
}

268
src/ds/test/json_bench.cpp Normal file

@ -0,0 +1,268 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the Apache 2.0 License.
#include "../json.h"
#include "../json_schema.h"
#define PICOBENCH_IMPLEMENT_WITH_MAIN
#include <picobench/picobench.hpp>
#include <valijson/adapters/nlohmann_json_adapter.hpp>
#include <valijson/schema.hpp>
#include <valijson/schema_parser.hpp>
#include <valijson/validator.hpp>
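// Optimisation barriers (in the style of Google Benchmark's DoNotOptimize and
// ClobberMemory) so the compiler cannot elide the conversions being measured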
template <class A>
inline void do_not_optimize(A const& value)
{
asm volatile("" : : "r,m"(value) : "memory");
}
inline void clobber_memory()
{
asm volatile("" : : : "memory");
}
void randomise(std::string& s)
{
s.resize(rand() % 20);
for (auto& c : s)
{
c = 'a' + rand() % 26;
}
}
void randomise(size_t& n)
{
n = rand();
}
void randomise(int& n)
{
n = rand();
}
void randomise(bool& b)
{
b = rand() % 2;
}
#define DECLARE_SIMPLE_STRUCT(PREFIX) \
struct Simple_##PREFIX \
{ \
size_t x; \
int y; \
void randomise() \
{ \
::randomise(x); \
::randomise(y); \
} \
};
#define DECLARE_COMPLEX_STRUCT(PREFIX) \
struct Complex_##PREFIX \
{ \
struct Foo \
{ \
size_t n; \
std::string s; \
void randomise() \
{ \
::randomise(n); \
::randomise(s); \
} \
}; \
struct Bar \
{ \
size_t a; \
size_t b; \
std::vector<Foo> foos; \
void randomise() \
{ \
::randomise(a); \
::randomise(b); \
foos.resize(rand() % 20); \
for (auto& e : foos) \
{ \
e.randomise(); \
} \
} \
}; \
bool b; \
int i; \
std::string s; \
std::vector<Bar> bars; \
void randomise() \
{ \
::randomise(b); \
::randomise(i); \
::randomise(s); \
bars.resize(rand() % 20); \
for (auto& e : bars) \
{ \
e.randomise(); \
} \
} \
};
namespace ccf
{
DECLARE_SIMPLE_STRUCT(manual)
void to_json(nlohmann::json& j, const Simple_manual& s)
{
j["x"] = s.x;
j["y"] = s.y;
}
void from_json(const nlohmann::json& j, Simple_manual& s)
{
s.x = j["x"];
s.y = j["y"];
}
DECLARE_COMPLEX_STRUCT(manual)
void to_json(nlohmann::json& j, const Complex_manual::Foo& f)
{
j["n"] = f.n;
j["s"] = f.s;
}
void to_json(nlohmann::json& j, const Complex_manual::Bar& b)
{
j["a"] = b.a;
j["b"] = b.b;
j["foos"] = b.foos;
}
void to_json(nlohmann::json& j, const Complex_manual& c)
{
j["b"] = c.b;
j["i"] = c.i;
j["s"] = c.s;
j["bars"] = c.bars;
}
void from_json(const nlohmann::json& j, Complex_manual::Foo& f)
{
f.n = j["n"];
f.s = j["s"];
}
void from_json(const nlohmann::json& j, Complex_manual::Bar& b)
{
b.a = j["a"];
b.b = j["b"];
b.foos = j["foos"].get<decltype(b.foos)>();
}
void from_json(const nlohmann::json& j, Complex_manual& c)
{
c.b = j["b"];
c.i = j["i"];
c.s = j["s"];
c.bars = j["bars"].get<decltype(c.bars)>();
}
DECLARE_SIMPLE_STRUCT(macros)
DECLARE_REQUIRED_JSON_FIELDS(Simple_macros, x, y);
DECLARE_COMPLEX_STRUCT(macros)
DECLARE_REQUIRED_JSON_FIELDS(Complex_macros::Foo, n, s);
DECLARE_REQUIRED_JSON_FIELDS(Complex_macros::Bar, a, b, foos);
DECLARE_REQUIRED_JSON_FIELDS(Complex_macros, b, i, s, bars);
}
using namespace ccf;
template <typename T, typename R = T>
std::vector<R> build_entries(picobench::state& s)
{
std::vector<R> entries(s.iterations());
for (auto& e : entries)
{
T t;
t.randomise();
e = t;
}
return entries;
}
template <typename T>
static void conv(picobench::state& s)
{
std::vector<T> entries = build_entries<T>(s);
clobber_memory();
picobench::scope scope(s);
for (size_t i = 0; i < s.iterations(); ++i)
{
nlohmann::json j = entries[i];
const auto b = j.get<T>();
do_not_optimize(b);
clobber_memory();
}
}
template <typename T>
void valmacro(picobench::state& s)
{
std::vector<nlohmann::json> entries = build_entries<T, nlohmann::json>(s);
clobber_memory();
picobench::scope scope(s);
for (size_t i = 0; i < s.iterations(); ++i)
{
const auto b = entries[i].get<T>();
do_not_optimize(b);
clobber_memory();
}
}
template <typename T>
void valjson(picobench::state& s)
{
std::vector<nlohmann::json> entries = build_entries<T, nlohmann::json>(s);
const auto schema_doc = ccf::build_schema<T>("Schema");
valijson::Schema schema;
valijson::SchemaParser parser;
valijson::adapters::NlohmannJsonAdapter schema_adapter(schema_doc);
parser.populateSchema(schema_adapter, schema);
valijson::Validator validator;
valijson::ValidationResults results;
clobber_memory();
picobench::scope scope(s);
for (size_t i = 0; i < s.iterations(); ++i)
{
valijson::adapters::NlohmannJsonAdapter doc_adapter(entries[i]);
const auto succeeded = validator.validate(schema, doc_adapter, &results);
do_not_optimize(succeeded);
clobber_memory();
}
}
const std::vector<int> sizes = {200, 2'000};
PICOBENCH_SUITE("simple");
PICOBENCH(conv<Simple_manual>).iterations(sizes).samples(10);
PICOBENCH(conv<Simple_macros>).iterations(sizes).samples(10);
PICOBENCH_SUITE("complex");
PICOBENCH(conv<Complex_manual>).iterations(sizes).samples(10);
PICOBENCH(conv<Complex_macros>).iterations(sizes).samples(10);
PICOBENCH_SUITE("validation simple");
PICOBENCH(valmacro<Simple_macros>).iterations(sizes).samples(10);
PICOBENCH(valjson<Simple_macros>).iterations(sizes).samples(10);
PICOBENCH_SUITE("validation complex");
PICOBENCH(valmacro<Complex_macros>).iterations(sizes).samples(10);
PICOBENCH(valjson<Complex_macros>).iterations(sizes).samples(10);

284
src/ds/test/json_schema.cpp Normal file

@ -0,0 +1,284 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the Apache 2.0 License.
#include "../json_schema.h"
#define DOCTEST_CONFIG_IMPLEMENT_WITH_MAIN
#include <doctest/doctest.h>
#include <nlohmann/json.hpp>
#include <valijson/adapters/nlohmann_json_adapter.hpp>
#include <valijson/schema.hpp>
#include <valijson/schema_parser.hpp>
#include <valijson/validator.hpp>
#include <vector>
struct Bar
{
size_t a = {};
std::string b = {};
size_t c = {};
};
DECLARE_REQUIRED_JSON_FIELDS(Bar, a);
DECLARE_OPTIONAL_JSON_FIELDS(Bar, b, c);
TEST_CASE("basic macro parser generation")
{
const Bar default_bar = {};
nlohmann::json j;
REQUIRE_THROWS_AS(j.get<Bar>(), std::invalid_argument);
j["a"] = 42;
const Bar bar_0 = j;
REQUIRE(bar_0.a == j["a"]);
REQUIRE(bar_0.b == default_bar.b);
REQUIRE(bar_0.c == default_bar.c);
j["b"] = "Test";
j["c"] = 100;
const Bar bar_1 = j;
REQUIRE(bar_1.a == j["a"]);
REQUIRE(bar_1.b == j["b"]);
REQUIRE(bar_1.c == j["c"]);
}
struct Baz : public Bar
{
size_t d = {};
size_t e = {};
};
DECLARE_REQUIRED_JSON_FIELDS_WITH_BASE(Baz, Bar, d);
DECLARE_OPTIONAL_JSON_FIELDS_WITH_BASE(Baz, Bar, e);
TEST_CASE("macro parser generation with base classes")
{
const Baz default_baz = {};
nlohmann::json j;
REQUIRE_THROWS_AS(j.get<Baz>(), std::invalid_argument);
j["a"] = 42;
REQUIRE_THROWS_AS(j.get<Baz>(), std::invalid_argument);
j["d"] = 43;
const Baz baz_0 = j;
REQUIRE(baz_0.a == j["a"]);
REQUIRE(baz_0.b == default_baz.b);
REQUIRE(baz_0.c == default_baz.c);
REQUIRE(baz_0.d == j["d"]);
REQUIRE(baz_0.e == default_baz.e);
j["b"] = "Test";
j["c"] = 100;
j["e"] = 101;
const Baz baz_1 = j;
REQUIRE(baz_1.a == j["a"]);
REQUIRE(baz_1.b == j["b"]);
REQUIRE(baz_1.c == j["c"]);
REQUIRE(baz_1.d == j["d"]);
REQUIRE(baz_1.e == j["e"]);
}
namespace ccf
{
struct Foo
{
size_t n_0 = 42;
size_t n_1 = 43;
int i_0 = -1;
int64_t i64_0 = -2;
std::string s_0 = "Default value";
std::string s_1 = "Other default value";
std::optional<size_t> opt = std::nullopt;
std::vector<std::string> vec_s = {};
size_t ignored;
};
DECLARE_REQUIRED_JSON_FIELDS(Foo, n_0, i_0, i64_0, s_0);
DECLARE_OPTIONAL_JSON_FIELDS(Foo, n_1, s_1, opt, vec_s);
}
TEST_CASE("schema generation")
{
const auto schema = ccf::build_schema<ccf::Foo>("Foo");
const auto properties_it = schema.find("properties");
REQUIRE(properties_it != schema.end());
const auto required_it = schema.find("required");
REQUIRE(required_it != schema.end());
REQUIRE(required_it->is_array());
REQUIRE(required_it->size() == 4);
// Check limits are actually achievable
{
auto j_max = nlohmann::json::object();
auto j_min = nlohmann::json::object();
for (const std::string& required : *required_it)
{
const auto property_it = properties_it->find(required);
REQUIRE(property_it != properties_it->end());
const auto type = property_it->at("type");
if (type == "number")
{
j_min[required] = property_it->at("minimum");
j_max[required] = property_it->at("maximum");
}
else if (type == "string")
{
j_min[required] = "Hello world";
j_max[required] = "Hello world";
}
else
{
throw std::logic_error("Unsupported type");
}
}
const auto foo_min = j_min.get<ccf::Foo>();
const auto foo_max = j_max.get<ccf::Foo>();
using size_limits = std::numeric_limits<size_t>;
REQUIRE(foo_min.n_0 == size_limits::min());
REQUIRE(foo_max.n_0 == size_limits::max());
using int_limits = std::numeric_limits<int>;
REQUIRE(foo_min.i_0 == int_limits::min());
REQUIRE(foo_max.i_0 == int_limits::max());
using int64_limits = std::numeric_limits<int64_t>;
REQUIRE(foo_min.i64_0 == int64_limits::min());
REQUIRE(foo_max.i64_0 == int64_limits::max());
}
}
namespace ccf
{
struct Nest0
{
size_t n = {};
};
DECLARE_REQUIRED_JSON_FIELDS(Nest0, n);
bool operator==(const Nest0& l, const Nest0& r)
{
return l.n == r.n;
}
struct Nest1
{
Nest0 a = {};
Nest0 b = {};
};
DECLARE_REQUIRED_JSON_FIELDS(Nest1, a, b);
bool operator==(const Nest1& l, const Nest1& r)
{
return l.a == r.a && l.b == r.b;
}
struct Nest2
{
Nest1 x;
std::vector<Nest1> xs;
};
DECLARE_REQUIRED_JSON_FIELDS(Nest2, x, xs);
bool operator==(const Nest2& l, const Nest2& r)
{
return l.x == r.x && l.xs == r.xs;
}
struct Nest3
{
Nest2 v;
};
DECLARE_REQUIRED_JSON_FIELDS(Nest3, v);
bool operator==(const Nest3& l, const Nest3& r)
{
return l.v == r.v;
}
}
TEST_CASE("nested")
{
using namespace ccf;
const Nest0 n0_1{10};
const Nest0 n0_2{20};
const Nest0 n0_3{30};
const Nest0 n0_4{40};
const Nest1 n1_1{n0_1, n0_2};
const Nest1 n1_2{n0_1, n0_3};
const Nest1 n1_3{n0_1, n0_4};
const Nest1 n1_4{n0_2, n0_3};
const Nest1 n1_5{n0_3, n0_4};
const Nest1 n1_6{n0_4, n0_4};
const Nest2 n2_1{n1_1, {n1_6, n1_5, n1_4, n1_3, n1_2}};
Nest3 n3{n2_1};
nlohmann::json j = n3;
const auto r0 = j.get<Nest3>();
REQUIRE(n3 == r0);
{
auto invalid_json = j;
invalid_json["v"]["xs"][3]["a"].erase("n");
try
{
invalid_json.get<Nest3>();
}
catch (JsonParseError& jpe)
{
REQUIRE(jpe.pointer() == "#/v/xs/3/a");
}
invalid_json["v"]["xs"][3].erase("a");
try
{
invalid_json.get<Nest3>();
}
catch (JsonParseError& jpe)
{
REQUIRE(jpe.pointer() == "#/v/xs/3");
}
invalid_json["v"]["xs"][3] = "Broken";
try
{
invalid_json.get<Nest3>();
}
catch (JsonParseError& jpe)
{
REQUIRE(jpe.pointer() == "#/v/xs/3");
}
invalid_json["v"]["xs"] = "Broken";
try
{
invalid_json.get<Nest3>();
}
catch (JsonParseError& jpe)
{
REQUIRE(jpe.pointer() == "#/v/xs");
}
invalid_json["v"].erase("xs");
try
{
invalid_json.get<Nest3>();
}
catch (JsonParseError& jpe)
{
REQUIRE(jpe.pointer() == "#/v");
}
}
}


@ -65,21 +65,4 @@ namespace ccf
std::vector<uint8_t> quote;
};
};
struct GetCommit
{
struct Out
{
uint64_t term;
int64_t commit;
};
};
struct GetMetrics
{
struct Out
{
nlohmann::json metrics;
};
};
}


@ -67,8 +67,8 @@ namespace ccf
bool completed;
};
};
ADD_JSON_TRANSLATORS(Proposal::In, script, parameter)
ADD_JSON_TRANSLATORS(Proposal::Out, id, completed)
DECLARE_REQUIRED_JSON_FIELDS(Proposal::In, script, parameter)
DECLARE_REQUIRED_JSON_FIELDS(Proposal::Out, id, completed)
struct OpenProposal : public Proposal::In
{
@ -83,7 +83,8 @@ namespace ccf
MSGPACK_DEFINE(proposer, votes);
};
ADD_JSON_TRANSLATORS_WITH_BASE(OpenProposal, Proposal::In, proposer, votes)
DECLARE_REQUIRED_JSON_FIELDS_WITH_BASE(
OpenProposal, Proposal::In, proposer, votes)
using Proposals = Store::Map<ObjectId, OpenProposal>;
struct ProposalAction
@ -91,13 +92,13 @@ namespace ccf
//! the id of the proposal subject to the action
ObjectId id;
};
ADD_JSON_TRANSLATORS(ProposalAction, id)
DECLARE_REQUIRED_JSON_FIELDS(ProposalAction, id)
struct Vote : public ProposalAction
{
Script ballot;
};
ADD_JSON_TRANSLATORS_WITH_BASE(Vote, ProposalAction, ballot)
DECLARE_REQUIRED_JSON_FIELDS_WITH_BASE(Vote, ProposalAction, ballot)
//! A call proposed by a proposal script
struct ProposedCall
@ -107,7 +108,7 @@ namespace ccf
//! the corresponding arguments
nlohmann::json args;
};
ADD_JSON_TRANSLATORS(ProposedCall, func, args)
DECLARE_REQUIRED_JSON_FIELDS(ProposedCall, func, args)
/** A list of calls proposed (and returned) by a proposal script
* Every proposal script must return a compatible data structure.

71
src/node/rpc/calltypes.h Normal file

@ -0,0 +1,71 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the Apache 2.0 License.
#pragma once
#include <nlohmann/json.hpp>
namespace ccf
{
struct GetCommit
{
struct In
{
std::optional<int64_t> commit = std::nullopt;
};
struct Out
{
uint64_t term;
int64_t commit;
};
};
struct GetMetrics
{
struct HistogramResults
{
int low = {};
int high = {};
size_t overflow = {};
size_t underflow = {};
nlohmann::json buckets = {};
};
struct Out
{
HistogramResults histogram;
nlohmann::json tx_rates;
};
};
struct GetLeaderInfo
{
struct Out
{
NodeId leader_id;
std::string leader_host;
std::string leader_port;
};
};
struct ListMethods
{
struct Out
{
std::vector<std::string> methods;
};
};
struct GetSchema
{
struct In
{
std::string method = {};
};
struct Out
{
nlohmann::json params_schema = nlohmann::json::object();
nlohmann::json result_schema = nlohmann::json::object();
};
};
}


@ -20,6 +20,8 @@ namespace ccf
static constexpr auto GET_METRICS = "getMetrics";
static constexpr auto MK_SIGN = "mkSign";
static constexpr auto GET_LEADER_INFO = "getLeaderInfo";
static constexpr auto LIST_METHODS = "listMethods";
static constexpr auto GET_SCHEMA = "getSchema";
};
struct ManagementProcs


@ -3,6 +3,8 @@
#pragma once
#include "consts.h"
#include "ds/buffer.h"
#include "ds/histogram.h"
#include "ds/json_schema.h"
#include "enclave/rpchandler.h"
#include "forwarder.h"
#include "jsonrpc.h"
@ -15,6 +17,8 @@
#include "rpcexception.h"
#include "serialization.h"
#define FMT_HEADER_ONLY
#include <fmt/format.h>
#include <utility>
#include <vector>
@ -67,6 +71,8 @@ namespace ccf
Certs* certs;
std::optional<Handler> default_handler;
std::unordered_map<std::string, Handler> handlers;
std::unordered_map<std::string, std::pair<nlohmann::json, nlohmann::json>>
schemas;
kv::Replicator* raft;
std::shared_ptr<AbstractForwarder> cmd_forwarder;
kv::TxHistory* history;
@ -161,6 +167,44 @@ namespace ccf
}
}
protected:
void register_schema(
const std::string& name,
const nlohmann::json& params_schema,
const nlohmann::json& result_schema)
{
if (schemas.find(name) != schemas.end())
{
throw std::logic_error("Already registered schema for " + name);
}
schemas[name] = std::make_pair(params_schema, result_schema);
}
template <typename In, typename Out>
void register_auto_schema(const std::string& name)
{
auto params_schema = nlohmann::json::object();
if constexpr (!std::is_same_v<In, void>)
{
params_schema = build_schema<In>(name + "_params");
}
auto result_schema = nlohmann::json::object();
if constexpr (!std::is_same_v<Out, void>)
{
result_schema = build_schema<Out>(name + "_result");
}
register_schema(name, params_schema, result_schema);
}
template <typename T>
void register_auto_schema(const std::string& name)
{
register_auto_schema<typename T::In, typename T::Out>(name);
}
public:
RpcFrontend(Store& tables_) : RpcFrontend(tables_, nullptr, nullptr) {}
@ -172,19 +216,11 @@ namespace ccf
raft(nullptr),
history(nullptr)
{
register_auto_schema<GetCommit>(GeneralProcs::GET_COMMIT);
auto get_commit = [this](Store::Tx& tx, const nlohmann::json& params) {
kv::Version commit;
const auto in = params.get<GetCommit::In>();
if (
params.is_array() && (params.size() > 0) &&
params[0].is_number_unsigned())
{
commit = params[0];
}
else
{
commit = tables.commit_version();
}
kv::Version commit = in.commit.value_or(tables.commit_version());
update_raft();
@ -199,11 +235,13 @@ namespace ccf
"Failed to get commit info from Raft");
};
register_auto_schema<void, GetMetrics::Out>(GeneralProcs::GET_METRICS);
auto get_metrics = [this](Store::Tx& tx, const nlohmann::json& params) {
auto result = metrics.get_metrics();
return jsonrpc::success(GetMetrics::Out{result});
return jsonrpc::success(result);
};
register_auto_schema<void, void>(GeneralProcs::MK_SIGN);
auto make_signature =
[this](Store::Tx& tx, const nlohmann::json& params) {
update_history();
@ -218,22 +256,24 @@ namespace ccf
jsonrpc::ErrorCodes::INTERNAL_ERROR, "Failed to trigger signature");
};
register_auto_schema<void, GetLeaderInfo::Out>(
GeneralProcs::GET_LEADER_INFO);
auto get_leader_info =
[this](Store::Tx& tx, const nlohmann::json& params) {
if ((nodes != nullptr) && (raft != nullptr))
{
NodeId leader_id = raft->leader();
nlohmann::json result;
auto nodes_view = tx.get_view(*nodes);
auto info = nodes_view->get(leader_id);
if (info)
{
result["leader_id"] = leader_id;
result["leader_host"] = info->pubhost;
result["leader_port"] = info->tlsport;
return jsonrpc::success(result);
GetLeaderInfo::Out out;
out.leader_id = leader_id;
out.leader_host = info->pubhost;
out.leader_port = info->tlsport;
return jsonrpc::success(out);
}
}
@ -241,10 +281,50 @@ namespace ccf
jsonrpc::ErrorCodes::TX_LEADER_UNKNOWN, "Leader unknown.");
};
register_auto_schema<void, ListMethods::Out>(GeneralProcs::LIST_METHODS);
auto list_methods = [this](Store::Tx& tx, const nlohmann::json& params) {
ListMethods::Out out;
for (const auto& handler : handlers)
{
out.methods.push_back(handler.first);
}
std::sort(out.methods.begin(), out.methods.end());
return jsonrpc::success(out);
};
register_auto_schema<GetSchema>(GeneralProcs::GET_SCHEMA);
auto get_schema = [this](Store::Tx& tx, const nlohmann::json& params) {
const auto in = params.get<GetSchema::In>();
if (handlers.find(in.method) == handlers.end())
{
return jsonrpc::error(
jsonrpc::ErrorCodes::INVALID_PARAMS,
"No method named " + in.method);
}
const auto it = schemas.find(in.method);
if (it == schemas.end())
{
return jsonrpc::error(
jsonrpc::ErrorCodes::INVALID_PARAMS,
"No schema available for " + in.method);
}
const GetSchema::Out out{it->second.first, it->second.second};
return jsonrpc::success(out);
};
install(GeneralProcs::GET_COMMIT, get_commit, Read);
install(GeneralProcs::GET_METRICS, get_metrics, Read);
install(GeneralProcs::MK_SIGN, make_signature, Write);
install(GeneralProcs::GET_LEADER_INFO, get_leader_info, Read);
install(GeneralProcs::LIST_METHODS, list_methods, Read);
install(GeneralProcs::GET_SCHEMA, get_schema, Read);
}
void disable_request_storing()
@ -497,7 +577,7 @@ namespace ccf
std::string method = rpc[jsonrpc::METHOD];
ctx.req.seq_no = rpc[jsonrpc::ID];
const nlohmann::json params = rpc[jsonrpc::PARAMS];
const nlohmann::json& params = rpc[jsonrpc::PARAMS];
if (!params.is_array() && !params.is_object() && !params.is_null())
return jsonrpc::error_response(
ctx.req.seq_no,
@ -595,6 +675,12 @@ namespace ccf
{
return jsonrpc::error_response(ctx.req.seq_no, e.error_id, e.msg);
}
catch (const JsonParseError& e)
{
const auto err = fmt::format("At {}:\n\t{}", e.pointer(), e.what());
return jsonrpc::error_response(
ctx.req.seq_no, jsonrpc::ErrorCodes::PARSE_ERROR, err);
}
catch (const std::exception& e)
{
return jsonrpc::error_response(


@ -220,21 +220,7 @@ namespace jsonrpc
message(std::string(get_error_prefix(error_code)) + msg)
{}
};
ADD_JSON_TRANSLATORS(Error, code, message)
template <typename T>
void to_json(nlohmann::json& j, const Error& e)
{
j[CODE] = e.code;
j[MESSAGE] = e.message;
}
template <typename T>
void from_json(const nlohmann::json& j, Error& e)
{
e.code = j[CODE];
e.message = j[MESSAGE];
}
ADD_JSON_TRANSLATORS(Error, code, message);
template <typename T>
struct ErrorEx : public Error


@ -2,6 +2,7 @@
// Licensed under the Apache 2.0 License.
#include "ds/histogram.h"
#include "ds/logger.h"
#include "serialization.h"
#include <nlohmann/json.hpp>
@ -25,14 +26,14 @@ namespace metrics
histogram::Global<Hist>("histogram", __FILE__, __LINE__);
Hist histogram = Hist(global);
nlohmann::json get_histogram_results()
ccf::GetMetrics::HistogramResults get_histogram_results()
{
nlohmann::json result;
ccf::GetMetrics::HistogramResults result;
nlohmann::json hist;
result["low"] = histogram.get_low();
result["high"] = histogram.get_high();
result["overflow"] = histogram.get_overflow();
result["underflow"] = histogram.get_underflow();
result.low = histogram.get_low();
result.high = histogram.get_high();
result.overflow = histogram.get_overflow();
result.underflow = histogram.get_underflow();
auto range_counts = histogram.get_range_count();
for (auto const& [range, count] : range_counts)
{
@ -41,7 +42,7 @@ namespace metrics
hist[range] = count;
}
}
result["buckets"] = hist;
result.buckets = hist;
return result;
}
@ -60,7 +61,7 @@ namespace metrics
}
public:
nlohmann::json get_metrics()
ccf::GetMetrics::Out get_metrics()
{
nlohmann::json result;
result["histogram"] = get_histogram_results();


@ -3,16 +3,32 @@
#pragma once
#include "ds/json.h"
#include "node/calltypes.h"
#include "node/rpc/calltypes.h"
namespace ccf
{
ADD_JSON_TRANSLATORS(StartNetwork::In, tx0, id)
ADD_JSON_TRANSLATORS(StartNetwork::Out, network_cert, tx0_sig)
ADD_JSON_TRANSLATORS(JoinNetwork::In, network_cert, hostname, service)
ADD_JSON_TRANSLATORS(JoinNetwork::Out, id)
ADD_JSON_TRANSLATORS(NetworkSecrets::Secret, cert, priv_key, master)
ADD_JSON_TRANSLATORS(JoinNetworkNodeToNode::In, raw_fresh_key)
ADD_JSON_TRANSLATORS(JoinNetworkNodeToNode::Out, id, network_secrets, version)
ADD_JSON_TRANSLATORS(GetCommit::Out, term, commit)
ADD_JSON_TRANSLATORS(GetMetrics::Out, metrics)
DECLARE_REQUIRED_JSON_FIELDS(StartNetwork::In, tx0, id)
DECLARE_REQUIRED_JSON_FIELDS(StartNetwork::Out, network_cert, tx0_sig)
DECLARE_REQUIRED_JSON_FIELDS(JoinNetwork::In, network_cert, hostname, service)
DECLARE_REQUIRED_JSON_FIELDS(JoinNetwork::Out, id)
DECLARE_REQUIRED_JSON_FIELDS(NetworkSecrets::Secret, cert, priv_key, master)
DECLARE_REQUIRED_JSON_FIELDS(JoinNetworkNodeToNode::In, raw_fresh_key)
DECLARE_REQUIRED_JSON_FIELDS(
JoinNetworkNodeToNode::Out, id, network_secrets, version)
DECLARE_REQUIRED_JSON_FIELDS(GetCommit::In)
DECLARE_OPTIONAL_JSON_FIELDS(GetCommit::In, commit)
DECLARE_REQUIRED_JSON_FIELDS(GetCommit::Out, term, commit)
DECLARE_REQUIRED_JSON_FIELDS(
GetMetrics::HistogramResults, low, high, overflow, underflow, buckets)
DECLARE_REQUIRED_JSON_FIELDS(GetMetrics::Out, histogram, tx_rates)
DECLARE_REQUIRED_JSON_FIELDS(
GetLeaderInfo::Out, leader_id, leader_host, leader_port)
DECLARE_REQUIRED_JSON_FIELDS(ListMethods::Out, methods)
DECLARE_REQUIRED_JSON_FIELDS(GetSchema::In, method)
DECLARE_REQUIRED_JSON_FIELDS(GetSchema::Out, params_schema, result_schema)
}


@ -44,8 +44,8 @@ def run(args):
check_notification(
c.rpc("LOG_record", {"id": 43, "msg": msg2}), result="OK"
)
check(c.rpc("LOG_get", {"id": 42}), result=msg)
check(c.rpc("LOG_get", {"id": 43}), result=msg2)
check(c.rpc("LOG_get", {"id": 42}), result={"msg": msg})
check(c.rpc("LOG_get", {"id": 43}), result={"msg": msg2})
LOG.debug("Write on all follower frontends")
with follower.management_client(format="json") as c:
@ -59,8 +59,8 @@ def run(args):
c.rpc("LOG_record", {"id": 100, "msg": follower_msg}),
result="OK",
)
check(c.rpc("LOG_get", {"id": 100}), result=follower_msg)
check(c.rpc("LOG_get", {"id": 42}), result=msg)
check(c.rpc("LOG_get", {"id": 100}), result={"msg": follower_msg})
check(c.rpc("LOG_get", {"id": 42}), result={"msg": msg})
LOG.debug("Write/Read large messages on leader")
with primary.user_client(format="json") as c:
@ -71,7 +71,7 @@ def run(args):
c.rpc("LOG_record", {"id": id, "msg": long_msg}),
result="OK",
)
check(c.rpc("LOG_get", {"id": id}), result=long_msg)
check(c.rpc("LOG_get", {"id": id}), result={"msg": long_msg})
id += 1


@ -38,9 +38,9 @@ def run(args):
for connection in scenario["connections"]:
with (
primary.user_client()
primary.user_client(format="json")
if not connection.get("on_follower")
else random.choice(followers).user_client()
else random.choice(followers).user_client(format="json")
) as client:
txs = connection.get("transactions", [])
@ -60,7 +60,7 @@ def run(args):
)
elif tx.get("expected_result") is not None:
check(r, result=tx.get("expected_result").encode())
check(r, result=tx.get("expected_result"))
else:
check(r, result=lambda res: res is not None)


@ -26,7 +26,7 @@ def wait_for_index_globally_committed(index, term, nodes):
up_to_date_f = []
for f in nodes:
with f.management_client() as c:
id = c.request("getCommit", [index])
id = c.request("getCommit", {"commit": index})
res = c.response(id)
if res.result["term"] == term and res.global_commit > index:
up_to_date_f.append(f.node_id)


@ -309,7 +309,9 @@ class Checker:
if self.management_client:
for i in range(timeout * 10):
r = self.management_client.rpc("getCommit", [rpc_result.commit])
r = self.management_client.rpc(
"getCommit", {"commit": rpc_result.commit}
)
if (
r.global_commit >= rpc_result.commit
and r.result["term"] == rpc_result.term


@ -61,7 +61,7 @@ class TxRates:
with self.primary.user_client(format="json") as client:
rv = client.rpc("getMetrics", {})
result = rv.to_dict()
result = result["result"]["metrics"]
result = result["result"]
self.all_metrics = result
all_rates = []


@ -39,9 +39,9 @@ def check_nodes_have_msgs(nodes, txs):
for node in nodes:
with node.user_client() as c:
for n, msg in txs.priv.items():
c.do("LOG_get", {"id": n}, msg.encode())
c.do("LOG_get", {"id": n}, {"msg": msg.encode()})
for n, msg in txs.pub.items():
c.do("LOG_get_pub", {"id": n}, msg.encode())
c.do("LOG_get_pub", {"id": n}, {"msg": msg.encode()})
def log_msgs(primary, txs):


@ -20,7 +20,9 @@
"params": {
"id": 42
},
"expected_result": "Hello world"
"expected_result": {
"msg": "Hello world"
}
}
]
},
@ -40,7 +42,9 @@
"params": {
"id": 42
},
"expected_result": "Hello world"
"expected_result": {
"msg": "Hello world"
}
}
]
}