From 3f248ca7cc94538f55bc0c8df2e3ea5fcf415c86 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Micha=C5=82=20Chojnowski?=
Date: Tue, 16 Feb 2021 16:03:36 +0100
Subject: [PATCH] utils: fragment_range: move FragmentedView helpers to
 fragment_range.hh

In the upcoming IMR removal patch we will need read_simple() and similar
helpers for FragmentedView outside of types.hh. For now, let's move them
to fragment_range.hh, where FragmentedView is defined.

Since it's a widely included header, we should consider moving them to a
more specialized header later.
---
 types.hh                | 49 ------------------
 utils/fragment_range.hh | 109 ++++++++++++++++++++++++++++++++++++++++
 utils/serialization.hh  | 13 -----
 3 files changed, 109 insertions(+), 62 deletions(-)

diff --git a/types.hh b/types.hh
index 8728a2b297..03ee31d078 100644
--- a/types.hh
+++ b/types.hh
@@ -1161,26 +1161,6 @@ typename Type::value_type deserialize_value(Type& t, bytes_view v) {
     return t.deserialize_value(v);
 }
 
-// Does not check bounds. Must be called only after size is already checked.
-template<FragmentedView View>
-void read_fragmented(View& v, size_t n, bytes::value_type* out) {
-    while (n) {
-        if (n <= v.current_fragment().size()) {
-            std::copy_n(v.current_fragment().data(), n, out);
-            v.remove_prefix(n);
-            n = 0;
-        } else {
-            out = std::copy_n(v.current_fragment().data(), v.current_fragment().size(), out);
-            n -= v.current_fragment().size();
-            v.remove_current();
-        }
-    }
-}
-template<> void inline read_fragmented(single_fragmented_view& v, size_t n, bytes::value_type* out) {
-    std::copy_n(v.current_fragment().data(), n, out);
-    v.remove_prefix(n);
-}
-
 template <typename T>
 T read_simple(bytes_view& v) {
     if (v.size() < sizeof(T)) {
@@ -1191,21 +1171,6 @@ T read_simple(bytes_view& v) {
     return net::ntoh(*reinterpret_cast<const net::packed<T>*>(p));
 }
 
-template<typename T, FragmentedView View>
-T read_simple(View& v) {
-    if (v.current_fragment().size() >= sizeof(T)) [[likely]] {
-        auto p = v.current_fragment().data();
-        v.remove_prefix(sizeof(T));
-        return net::ntoh(*reinterpret_cast<const net::packed<T>*>(p));
-    } else if (v.size_bytes() >= sizeof(T)) {
-        T buf;
-        read_fragmented(v, sizeof(T), reinterpret_cast<bytes::value_type*>(&buf));
-        return net::ntoh(buf);
-    } else {
-        throw_with_backtrace<marshal_exception>(format("read_simple - not enough bytes (expected {:d}, got {:d})", sizeof(T), v.size_bytes()));
-    }
-}
-
 template <typename T>
 T read_simple_exactly(bytes_view v) {
     if (v.size() != sizeof(T)) {
@@ -1215,20 +1180,6 @@ T read_simple_exactly(bytes_view v) {
     return net::ntoh(*reinterpret_cast<const net::packed<T>*>(p));
 }
 
-template<typename T, FragmentedView View>
-T read_simple_exactly(View v) {
-    if (v.current_fragment().size() == sizeof(T)) [[likely]] {
-        auto p = v.current_fragment().data();
-        return net::ntoh(*reinterpret_cast<const net::packed<T>*>(p));
-    } else if (v.size_bytes() == sizeof(T)) {
-        T buf;
-        read_fragmented(v, sizeof(T), reinterpret_cast<bytes::value_type*>(&buf));
-        return net::ntoh(buf);
-    } else {
-        throw_with_backtrace<marshal_exception>(format("read_simple_exactly - size mismatch (expected {:d}, got {:d})", sizeof(T), v.size_bytes()));
-    }
-}
-
 inline bytes_view read_simple_bytes(bytes_view& v, size_t n) {
diff --git a/utils/fragment_range.hh b/utils/fragment_range.hh
index
853583d429..3971c4bacf 100644
--- a/utils/fragment_range.hh
+++ b/utils/fragment_range.hh
@@ -24,8 +24,10 @@
 #include <algorithm>
 #include <concepts>
 #include <cstring>
+#include <seastar/util/backtrace.hh>
 #include <seastar/net/byteorder.hh>
+#include "marshal_exception.hh"
 #include "bytes.hh"
 
 enum class mutable_view { no, yes, };
@@ -288,6 +290,11 @@ int compare_unsigned(V1 v1, V2 v2) {
     return v1.size_bytes() - v2.size_bytes();
 }
 
+template<FragmentedView V1, FragmentedView V2>
+int equal_unsigned(V1 v1, V2 v2) {
+    return v1.size_bytes() == v2.size_bytes() && compare_unsigned(v1, v2) == 0;
+}
+
 template<FragmentedMutableView Dest, FragmentedView Src>
 void write_fragmented(Dest& dest, Src src) {
     if (dest.size_bytes() < src.size_bytes()) [[unlikely]] {
@@ -300,3 +307,105 @@ void write_fragmented(Dest& dest, Src src) {
         src.remove_prefix(n);
     }
 }
+
+template<FragmentedMutableView Dest, FragmentedView Src>
+void copy_fragmented_view(Dest dest, Src src) {
+    if (dest.size_bytes() < src.size_bytes()) [[unlikely]] {
+        throw std::out_of_range(format("tried to copy a buffer of size {} to a buffer of smaller size {}", src.size_bytes(), dest.size_bytes()));
+    }
+    while (!src.empty()) {
+        size_t n = std::min(dest.current_fragment().size(), src.current_fragment().size());
+        memcpy(dest.current_fragment().data(), src.current_fragment().data(), n);
+        dest.remove_prefix(n);
+        src.remove_prefix(n);
+    }
+}
+
+// Does not check bounds. Must be called only after size is already checked.
+template +void read_fragmented(View& v, size_t n, bytes::value_type* out) { + while (n) { + if (n <= v.current_fragment().size()) { + std::copy_n(v.current_fragment().data(), n, out); + v.remove_prefix(n); + n = 0; + } else { + out = std::copy_n(v.current_fragment().data(), v.current_fragment().size(), out); + n -= v.current_fragment().size(); + v.remove_current(); + } + } +} +template<> void inline read_fragmented(single_fragmented_view& v, size_t n, bytes::value_type* out) { + std::copy_n(v.current_fragment().data(), n, out); + v.remove_prefix(n); +} + +template +T read_simple_native(View& v) { + if (v.current_fragment().size() >= sizeof(T)) [[likely]] { + auto p = v.current_fragment().data(); + v.remove_prefix(sizeof(T)); + return *reinterpret_cast*>(p); + } else if (v.size_bytes() >= sizeof(T)) { + T buf; + read_fragmented(v, sizeof(T), reinterpret_cast(&buf)); + return buf; + } else { + throw_with_backtrace(format("read_simple - not enough bytes (expected {:d}, got {:d})", sizeof(T), v.size_bytes())); + } +} + +template +T read_simple(View& v) { + if (v.current_fragment().size() >= sizeof(T)) [[likely]] { + auto p = v.current_fragment().data(); + v.remove_prefix(sizeof(T)); + return net::ntoh(*reinterpret_cast*>(p)); + } else if (v.size_bytes() >= sizeof(T)) { + T buf; + read_fragmented(v, sizeof(T), reinterpret_cast(&buf)); + return net::ntoh(buf); + } else { + throw_with_backtrace(format("read_simple - not enough bytes (expected {:d}, got {:d})", sizeof(T), v.size_bytes())); + } +} + +template +T read_simple_exactly(View v) { + if (v.current_fragment().size() == sizeof(T)) [[likely]] { + auto p = v.current_fragment().data(); + return net::ntoh(*reinterpret_cast*>(p)); + } else if (v.size_bytes() == sizeof(T)) { + T buf; + read_fragmented(v, sizeof(T), reinterpret_cast(&buf)); + return net::ntoh(buf); + } else { + throw_with_backtrace(format("read_simple_exactly - size mismatch (expected {:d}, got {:d})", sizeof(T), v.size_bytes())); + } +} + +template 
+static inline
+void write(Out& out, std::type_identity_t<T> val) {
+    auto v = net::ntoh(val);
+    auto p = reinterpret_cast<const bytes::value_type*>(&v);
+    if (out.current_fragment().size() >= sizeof(v)) [[likely]] {
+        std::copy_n(p, sizeof(v), out.current_fragment().data());
+        out.remove_prefix(sizeof(v));
+    } else {
+        write_fragmented(out, single_fragmented_view(bytes_view(p, sizeof(v))));
+    }
+}
+
+template<typename T, FragmentedMutableView Out>
+static inline
+void write_native(Out& out, std::type_identity_t<T> v) {
+    auto p = reinterpret_cast<const bytes::value_type*>(&v);
+    if (out.current_fragment().size() >= sizeof(v)) [[likely]] {
+        std::copy_n(p, sizeof(v), out.current_fragment().data());
+        out.remove_prefix(sizeof(v));
+    } else {
+        write_fragmented(out, single_fragmented_view(bytes_view(p, sizeof(v))));
+    }
+}
diff --git a/utils/serialization.hh b/utils/serialization.hh
index 53ca3a8430..810612bde4 100644
--- a/utils/serialization.hh
+++ b/utils/serialization.hh
@@ -160,16 +160,3 @@ void write(CharOutputIterator& out, const T& val) {
     auto v = net::ntoh(val);
     out = std::copy_n(reinterpret_cast<const char*>(&v), sizeof(v), out);
 }
-
-template<typename T, FragmentedMutableView Out>
-static inline
-void write(Out& out, std::type_identity_t<T> val) {
-    auto v = net::ntoh(val);
-    auto p = reinterpret_cast<const bytes::value_type*>(&v);
-    if (out.current_fragment().size() >= sizeof(v)) [[likely]] {
-        std::copy_n(p, sizeof(v), out.current_fragment().data());
-        out.remove_prefix(sizeof(v));
-    } else {
-        write_fragmented(out, single_fragmented_view(bytes_view(p, sizeof(v))));
-    }
-}