FieldLoc fl = { off, field };
buf_.scratch_push_small(fl);
num_field_loc++;
- if (field > max_voffset_) {
- max_voffset_ = field;
- }
+ if (field > max_voffset_) { max_voffset_ = field; }
}
// Like PushElement, but additionally tracks the field this represents.
void SwapBufAllocator(FlatBufferBuilder &other) {
buf_.swap_allocator(other.buf_);
}
-
+
/// @brief The length of a FlatBuffer file header.
static const size_t kFileIdentifierLength =
::flatbuffers::kFileIdentifierLength;
};
// String which identifies the current version of FlatBuffers.
-inline const char * flatbuffers_version_string() {
+inline const char *flatbuffers_version_string() {
return "FlatBuffers " FLATBUFFERS_STRING(FLATBUFFERS_VERSION_MAJOR) "."
FLATBUFFERS_STRING(FLATBUFFERS_VERSION_MINOR) "."
FLATBUFFERS_STRING(FLATBUFFERS_VERSION_REVISION);
}
+// clang-format off
#define FLATBUFFERS_DEFINE_BITMASK_OPERATORS(E, T)\
inline E operator | (E lhs, E rhs){\
return E(T(lhs) | T(rhs));\
class Reference {
public:
Reference()
- : data_(nullptr),
- parent_width_(0),
- byte_width_(0),
- type_(FBT_NULL) {}
+ : data_(nullptr), parent_width_(0), byte_width_(0), type_(FBT_NULL) {}
Reference(const uint8_t *data, uint8_t parent_width, uint8_t byte_width,
Type type)
// comes at the cost of using additional memory the same size of
// the buffer being verified, so it is by default off.
std::vector<uint8_t> *reuse_tracker = nullptr,
- bool _check_alignment = true,
- size_t max_depth = 64)
+ bool _check_alignment = true, size_t max_depth = 64)
: buf_(buf),
size_(buf_len),
depth_(0),
auto o = static_cast<size_t>(p - buf_);
return VerifyBefore(o, len);
}
-
+
bool VerifyByteWidth(size_t width) {
return Check(width == 1 || width == 2 || width == 4 || width == 8);
}
- bool VerifyType(int type) {
- return Check(type >= 0 && type < FBT_MAX_TYPE);
- }
+ bool VerifyType(int type) { return Check(type >= 0 && type < FBT_MAX_TYPE); }
bool VerifyOffset(uint64_t off, const uint8_t *p) {
return Check(off <= static_cast<uint64_t>(size_)) &&
- off <= static_cast<uint64_t>(p - buf_);
+ off <= static_cast<uint64_t>(p - buf_);
}
bool VerifyAlignment(const uint8_t *p, size_t size) const {
return Check((o & (size - 1)) == 0 || !check_alignment_);
}
- // Macro, since we want to escape from parent function & use lazy args.
- #define FLEX_CHECK_VERIFIED(P, PACKED_TYPE) \
- if (reuse_tracker_) { \
- auto packed_type = PACKED_TYPE; \
- auto existing = (*reuse_tracker_)[P - buf_]; \
- if (existing == packed_type) return true; \
- /* Fail verification if already set with different type! */ \
- if (!Check(existing == 0)) return false; \
- (*reuse_tracker_)[P - buf_] = packed_type; \
- }
+// Macro, since we want to escape from parent function & use lazy args.
+#define FLEX_CHECK_VERIFIED(P, PACKED_TYPE) \
+ if (reuse_tracker_) { \
+ auto packed_type = PACKED_TYPE; \
+ auto existing = (*reuse_tracker_)[P - buf_]; \
+ if (existing == packed_type) return true; \
+ /* Fail verification if already set with different type! */ \
+ if (!Check(existing == 0)) return false; \
+ (*reuse_tracker_)[P - buf_] = packed_type; \
+ }
bool VerifyVector(Reference r, const uint8_t *p, Type elem_type) {
// Any kind of nesting goes thru this function, so guard against that
if (!Check(depth_ <= max_depth_ && num_vectors_ <= max_vectors_))
return false;
auto size_byte_width = r.byte_width_;
- FLEX_CHECK_VERIFIED(p, PackedType(Builder::WidthB(size_byte_width), r.type_));
- if (!VerifyBeforePointer(p, size_byte_width))
- return false;
+ FLEX_CHECK_VERIFIED(p,
+ PackedType(Builder::WidthB(size_byte_width), r.type_));
+ if (!VerifyBeforePointer(p, size_byte_width)) return false;
auto sized = Sized(p, size_byte_width);
auto num_elems = sized.size();
- auto elem_byte_width =
- r.type_ == FBT_STRING || r.type_ == FBT_BLOB ? uint8_t(1) : r.byte_width_;
+ auto elem_byte_width = r.type_ == FBT_STRING || r.type_ == FBT_BLOB
+ ? uint8_t(1)
+ : r.byte_width_;
auto max_elems = SIZE_MAX / elem_byte_width;
if (!Check(num_elems < max_elems))
return false; // Protect against byte_size overflowing.
auto byte_size = num_elems * elem_byte_width;
- if (!VerifyFromPointer(p, byte_size))
- return false;
+ if (!VerifyFromPointer(p, byte_size)) return false;
if (elem_type == FBT_NULL) {
// Verify type bytes after the vector.
if (!VerifyFromPointer(p + byte_size, num_elems)) return false;
bool VerifyKeys(const uint8_t *p, uint8_t byte_width) {
// The vector part of the map has already been verified.
const size_t num_prefixed_fields = 3;
- if (!VerifyBeforePointer(p, byte_width * num_prefixed_fields))
- return false;
+ if (!VerifyBeforePointer(p, byte_width * num_prefixed_fields)) return false;
p -= byte_width * num_prefixed_fields;
auto off = ReadUInt64(p, byte_width);
- if (!VerifyOffset(off, p))
- return false;
+ if (!VerifyOffset(off, p)) return false;
auto key_byte_with =
- static_cast<uint8_t>(ReadUInt64(p + byte_width, byte_width));
- if (!VerifyByteWidth(key_byte_with))
- return false;
+ static_cast<uint8_t>(ReadUInt64(p + byte_width, byte_width));
+ if (!VerifyByteWidth(key_byte_with)) return false;
return VerifyVector(Reference(p, byte_width, key_byte_with, FBT_VECTOR_KEY),
p - off, FBT_KEY);
}
- bool VerifyKey(const uint8_t* p) {
+ bool VerifyKey(const uint8_t *p) {
FLEX_CHECK_VERIFIED(p, PackedType(BIT_WIDTH_8, FBT_KEY));
while (p < buf_ + size_)
if (*p++) return true;
return false;
}
- #undef FLEX_CHECK_VERIFIED
+#undef FLEX_CHECK_VERIFIED
bool VerifyTerminator(const String &s) {
return VerifyFromPointer(reinterpret_cast<const uint8_t *>(s.c_str()),
}
// All remaining types are an offset.
auto off = ReadUInt64(r.data_, r.parent_width_);
- if (!VerifyOffset(off, r.data_))
- return false;
+ if (!VerifyOffset(off, r.data_)) return false;
auto p = r.Indirect();
- if (!VerifyAlignment(p, r.byte_width_))
- return false;
+ if (!VerifyAlignment(p, r.byte_width_)) return false;
switch (r.type_) {
case FBT_INDIRECT_INT:
case FBT_INDIRECT_UINT:
- case FBT_INDIRECT_FLOAT:
- return VerifyFromPointer(p, r.byte_width_);
- case FBT_KEY:
- return VerifyKey(p);
+ case FBT_INDIRECT_FLOAT: return VerifyFromPointer(p, r.byte_width_);
+ case FBT_KEY: return VerifyKey(p);
case FBT_MAP:
- return VerifyVector(r, p, FBT_NULL) &&
- VerifyKeys(p, r.byte_width_);
- case FBT_VECTOR:
- return VerifyVector(r, p, FBT_NULL);
- case FBT_VECTOR_INT:
- return VerifyVector(r, p, FBT_INT);
+ return VerifyVector(r, p, FBT_NULL) && VerifyKeys(p, r.byte_width_);
+ case FBT_VECTOR: return VerifyVector(r, p, FBT_NULL);
+ case FBT_VECTOR_INT: return VerifyVector(r, p, FBT_INT);
case FBT_VECTOR_BOOL:
- case FBT_VECTOR_UINT:
- return VerifyVector(r, p, FBT_UINT);
- case FBT_VECTOR_FLOAT:
- return VerifyVector(r, p, FBT_FLOAT);
- case FBT_VECTOR_KEY:
- return VerifyVector(r, p, FBT_KEY);
+ case FBT_VECTOR_UINT: return VerifyVector(r, p, FBT_UINT);
+ case FBT_VECTOR_FLOAT: return VerifyVector(r, p, FBT_FLOAT);
+ case FBT_VECTOR_KEY: return VerifyVector(r, p, FBT_KEY);
case FBT_VECTOR_STRING_DEPRECATED:
// Use of FBT_KEY here intentional, see elsewhere.
return VerifyVector(r, p, FBT_KEY);
- case FBT_BLOB:
- return VerifyVector(r, p, FBT_UINT);
+ case FBT_BLOB: return VerifyVector(r, p, FBT_UINT);
case FBT_STRING:
return VerifyVector(r, p, FBT_UINT) &&
VerifyTerminator(String(p, r.byte_width_));
case FBT_VECTOR_FLOAT4: {
uint8_t len = 0;
auto vtype = ToFixedTypedVectorElementType(r.type_, &len);
- if (!VerifyType(vtype))
- return false;
+ if (!VerifyType(vtype)) return false;
return VerifyFromPointer(p, r.byte_width_ * len);
}
- default:
- return false;
+ default: return false;
}
}
auto end = buf_ + size_;
auto byte_width = *--end;
auto packed_type = *--end;
- return VerifyByteWidth(byte_width) &&
- Check(end - buf_ >= byte_width) &&
+ return VerifyByteWidth(byte_width) && Check(end - buf_ >= byte_width) &&
VerifyRef(Reference(end - byte_width, byte_width, packed_type));
}
std::vector<uint8_t> *reuse_tracker_;
};
-// Utility function that contructs the Verifier for you, see above for parameters.
+// Utility function that constructs the Verifier for you, see above for
+// parameters.
inline bool VerifyBuffer(const uint8_t *buf, size_t buf_len,
std::vector<uint8_t> *reuse_tracker = nullptr) {
Verifier verifier(buf, buf_len, reuse_tracker);
return verifier.VerifyBuffer();
}
-
#ifdef FLATBUFFERS_H_
// This is a verifier utility function that works together with the
// FlatBuffers verifier, which should only be present if flatbuffer.h
inline bool VerifyNestedFlexBuffer(const flatbuffers::Vector<uint8_t> *nv,
flatbuffers::Verifier &verifier) {
if (!nv) return true;
- return verifier.Check(
- flexbuffers::VerifyBuffer(nv->data(), nv->size(),
- verifier.GetFlexReuseTracker()));
+ return verifier.Check(flexbuffers::VerifyBuffer(
+ nv->data(), nv->size(), verifier.GetFlexReuseTracker()));
}
#endif
// Verify a particular field.
template<typename T>
- bool VerifyField(const Verifier &verifier, voffset_t field, size_t align) const {
+ bool VerifyField(const Verifier &verifier, voffset_t field,
+ size_t align) const {
// Calling GetOptionalFieldOffset should be safe now thanks to
// VerifyTable().
auto field_offset = GetOptionalFieldOffset(field);
// Check the actual field.
- return !field_offset ||
- verifier.VerifyField<T>(data_, field_offset, align);
+ return !field_offset || verifier.VerifyField<T>(data_, field_offset, align);
}
// VerifyField for required fields.
template<typename T>
- bool VerifyFieldRequired(const Verifier &verifier, voffset_t field, size_t align) const {
+ bool VerifyFieldRequired(const Verifier &verifier, voffset_t field,
+ size_t align) const {
auto field_offset = GetOptionalFieldOffset(field);
return verifier.Check(field_offset != 0) &&
verifier.VerifyField<T>(data_, field_offset, align);
#ifndef FLATBUFFERS_UTIL_H_
#define FLATBUFFERS_UTIL_H_
-#include <errno.h>
#include <ctype.h>
+#include <errno.h>
#include "flatbuffers/base.h"
#include "flatbuffers/stl_emulation.h"
#ifndef FLATBUFFERS_PREFER_PRINTF
-# include <sstream>
# include <iomanip>
+# include <sstream>
#else // FLATBUFFERS_PREFER_PRINTF
# include <float.h>
# include <stdio.h>
}
// Verify relative to a known-good base pointer.
- bool VerifyFieldStruct(const uint8_t *base, voffset_t elem_off, size_t elem_len,
- size_t align) const {
+ bool VerifyFieldStruct(const uint8_t *base, voffset_t elem_off,
+ size_t elem_len, size_t align) const {
auto f = static_cast<size_t>(base - buf_) + elem_off;
return VerifyAlignment(f, align) && Verify(f, elem_len);
}
template<typename T>
- bool VerifyField(const uint8_t *base, voffset_t elem_off, size_t align) const {
+ bool VerifyField(const uint8_t *base, voffset_t elem_off,
+ size_t align) const {
auto f = static_cast<size_t>(base - buf_) + elem_off;
return VerifyAlignment(f, align) && Verify(f, sizeof(T));
}
// clang-format on
}
- std::vector<uint8_t> *GetFlexReuseTracker() {
- return flex_reuse_tracker_;
- }
+ std::vector<uint8_t> *GetFlexReuseTracker() { return flex_reuse_tracker_; }
void SetFlexReuseTracker(std::vector<uint8_t> *rt) {
flex_reuse_tracker_ = rt;
// Check if a size-prefixed buffer has the identifier.
code_ += "inline \\";
- code_ += "bool SizePrefixed{{STRUCT_NAME}}BufferHasIdentifier(const void *buf) {";
+ code_ +=
+ "bool SizePrefixed{{STRUCT_NAME}}BufferHasIdentifier(const void "
+ "*buf) {";
code_ += " return flatbuffers::BufferHasIdentifier(";
code_ += " buf, {{STRUCT_NAME}}Identifier(), true);";
code_ += "}";
if (ev.union_type.base_type == BASE_TYPE_STRUCT) {
if (ev.union_type.struct_def->fixed) {
code_.SetValue("ALIGN",
- NumToString(ev.union_type.struct_def->minalign));
+ NumToString(ev.union_type.struct_def->minalign));
code_ +=
" return verifier.VerifyField<{{TYPE}}>("
"static_cast<const uint8_t *>(obj), 0, {{ALIGN}});";
code_.SetValue("OFFSET", GenFieldOffsetName(field));
if (IsScalar(field.value.type.base_type) || IsStruct(field.value.type)) {
code_.SetValue("ALIGN", NumToString(InlineAlignment(field.value.type)));
- code_ += "{{PRE}}VerifyField{{REQUIRED}}<{{SIZE}}>(verifier, "
- "{{OFFSET}}, {{ALIGN}})\\";
+ code_ +=
+ "{{PRE}}VerifyField{{REQUIRED}}<{{SIZE}}>(verifier, "
+ "{{OFFSET}}, {{ALIGN}})\\";
} else {
code_ += "{{PRE}}VerifyOffset{{REQUIRED}}(verifier, {{OFFSET}})\\";
}
if (!nfn.empty()) {
code_.SetValue("CPP_NAME", nfn);
// FIXME: file_identifier.
- code_ += "{{PRE}}verifier.VerifyNestedFlatBuffer<{{CPP_NAME}}>"
- "({{NAME}}(), nullptr)\\";
+ code_ +=
+ "{{PRE}}verifier.VerifyNestedFlatBuffer<{{CPP_NAME}}>"
+ "({{NAME}}(), nullptr)\\";
} else if (field.flexbuffer) {
- code_ += "{{PRE}}flexbuffers::VerifyNestedFlexBuffer"
- "({{NAME}}(), verifier)\\";
+ code_ +=
+ "{{PRE}}flexbuffers::VerifyNestedFlexBuffer"
+ "({{NAME}}(), verifier)\\";
}
break;
}
}
// Returns {field<val: -1, field==val: 0, field>val: +1}.
code_.SetValue("KEY_TYPE", type);
- code_ += " int KeyCompareWithValue({{KEY_TYPE}} _{{FIELD_NAME}}) const {";
+ code_ +=
+ " int KeyCompareWithValue({{KEY_TYPE}} _{{FIELD_NAME}}) const {";
code_ +=
" return static_cast<int>({{FIELD_NAME}}() > _{{FIELD_NAME}}) - "
"static_cast<int>({{FIELD_NAME}}() < _{{FIELD_NAME}});";
}
break;
}
- case BASE_TYPE_UNION:
- GetUnionField(struct_def, field, code_ptr);
- break;
- default:
- FLATBUFFERS_ASSERT(0);
+ case BASE_TYPE_UNION: GetUnionField(struct_def, field, code_ptr); break;
+ default: FLATBUFFERS_ASSERT(0);
}
}
if (IsVector(field.value.type) || IsArray(field.value.type)) {
import_list->insert("import " + package_reference);
}
break;
- case BASE_TYPE_STRING:
- field_type += "str";
- break;
- case BASE_TYPE_NONE:
- field_type += "None";
- break;
- default:
- break;
+ case BASE_TYPE_STRING: field_type += "str"; break;
+ case BASE_TYPE_NONE: field_type += "None"; break;
+ default: break;
}
field_types += field_type + separator_string;
}
GenUnPackForScalarVector(struct_def, field, &code);
break;
}
- default:
- GenUnPackForScalar(struct_def, field, &code);
+ default: GenUnPackForScalar(struct_def, field, &code);
}
}
code_ += " serializer.serialize_u32(self.bits() as u32)";
} else {
code_ +=
- " serializer.serialize_unit_variant(\"{{ENUM_NAME}}\", self.0 as "
+ " serializer.serialize_unit_variant(\"{{ENUM_NAME}}\", self.0 "
+ "as "
"u32, self.variant_name().unwrap())";
}
code_ += " }";
ForAllStructFields(struct_def, [&](const FieldDef &unused) {
(void)unused;
code_ +=
- " s.serialize_field(\"{{FIELD_NAME}}\", &self.{{FIELD_NAME}}())?;";
+ " s.serialize_field(\"{{FIELD_NAME}}\", "
+ "&self.{{FIELD_NAME}}())?;";
});
code_ += " s.end()";
code_ += " }";
code_ += "";
if (parser_.opts.rust_serialize) {
code_ += indent + "extern crate serde;";
- code_ += indent +
- "use self::serde::ser::{Serialize, Serializer, SerializeStruct};";
+ code_ +=
+ indent +
+ "use self::serde::ser::{Serialize, Serializer, SerializeStruct};";
code_ += "";
}
code_ += indent + "extern crate flatbuffers;";
}
if (IsBool(field.value.type.base_type)) {
- std::string default_value = field.IsOptional() ? "nil" :
- ("0" == field.value.constant ? "false" : "true");
+ std::string default_value =
+ field.IsOptional() ? "nil"
+ : ("0" == field.value.constant ? "false" : "true");
code_.SetValue("CONSTANT", default_value);
code_.SetValue("VALUETYPE", "Bool");
code_ += GenReaderMainBody(optional) + "\\";
switch (type.base_type) {
case BASE_TYPE_BOOL: return allowNull ? "boolean|null" : "boolean";
case BASE_TYPE_LONG:
- case BASE_TYPE_ULONG:
- return allowNull ? "bigint|null" : "bigint";
+ case BASE_TYPE_ULONG: return allowNull ? "bigint|null" : "bigint";
default:
if (IsScalar(type.base_type)) {
if (type.enum_def) {
// a string that contains values for things that can be created inline or
// the variable name from field_offset_decl
std::string field_offset_val;
- const auto field_default_val =
- GenDefaultValue(field, imports);
+ const auto field_default_val = GenDefaultValue(field, imports);
// Emit a scalar field
const auto is_string = IsString(field.value.type);
std::string GetArgName(const FieldDef &field) {
auto argname = MakeCamel(field.name, false);
- if (!IsScalar(field.value.type.base_type)) {
+ if (!IsScalar(field.value.type.base_type)) {
argname += "Offset";
} else {
argname = EscapeKeyword(argname);
if (opts.json_nested_legacy_flatbuffers) {
ECHECK(ParseAnyValue(val, field, fieldn, parent_struct_def, 0));
} else {
- return Error("cannot parse nested_flatbuffer as bytes unless"
- " --json-nested-bytes is set");
+ return Error(
+ "cannot parse nested_flatbuffer as bytes unless"
+ " --json-nested-bytes is set");
}
} else {
auto cursor_at_value_begin = cursor_;
});
else
std::sort(v.begin(), v.end(), [](const EnumVal *e1, const EnumVal *e2) {
- if (e1->GetAsInt64() == e2->GetAsInt64()) {
- return e1->name < e2->name;
- }
+ if (e1->GetAsInt64() == e2->GetAsInt64()) { return e1->name < e2->name; }
return e1->GetAsInt64() < e2->GetAsInt64();
});
}
case reflection::Bool:
case reflection::Byte:
case reflection::UByte:
- if (!table->VerifyField<int8_t>(v, field_def->offset(),
- sizeof(int8_t)))
+ if (!table->VerifyField<int8_t>(v, field_def->offset(), sizeof(int8_t)))
return false;
break;
case reflection::Short:
return false;
break;
case reflection::Float:
- if (!table->VerifyField<float>(v, field_def->offset(),
- sizeof(float)))
+ if (!table->VerifyField<float>(v, field_def->offset(), sizeof(float)))
return false;
break;
case reflection::Double:
- if (!table->VerifyField<double>(v, field_def->offset(),
- sizeof(double)))
+ if (!table->VerifyField<double>(v, field_def->offset(), sizeof(double)))
return false;
break;
case reflection::String:
}
// Utility function to check a Monster object.
-void CheckMonsterObject(MonsterT* monster2) {
+void CheckMonsterObject(MonsterT *monster2) {
TEST_EQ(monster2->hp, 80);
TEST_EQ(monster2->mana, 150); // default
TEST_EQ_STR(monster2->name.c_str(), "MyMonster");
// Test object copy.
auto monster3 = *monster2;
flatbuffers::FlatBufferBuilder fbb3;
- fbb3.Finish(CreateMonster(fbb3, &monster3, &rehasher),
- MonsterIdentifier());
+ fbb3.Finish(CreateMonster(fbb3, &monster3, &rehasher), MonsterIdentifier());
const auto len3 = fbb3.GetSize();
TEST_EQ(len2, len3);
TEST_EQ(memcmp(fbb2.GetBufferPointer(), fbb3.GetBufferPointer(), len2), 0);