/*
 * Copyright 2015 Google Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "flatbuffers/reflection.h"

#include "flatbuffers/util.h"

// Helper functionality for reflection.

namespace flatbuffers {

int64_t GetAnyValueI(reflection::BaseType type, const uint8_t *data) {
  #define FLATBUFFERS_GET(T) static_cast<int64_t>(ReadScalar<T>(data))
  switch (type) {
    case reflection::UType:
    case reflection::Bool:
    case reflection::UByte: return FLATBUFFERS_GET(uint8_t);
    case reflection::Byte: return FLATBUFFERS_GET(int8_t);
    case reflection::Short: return FLATBUFFERS_GET(int16_t);
    case reflection::UShort: return FLATBUFFERS_GET(uint16_t);
    case reflection::Int: return FLATBUFFERS_GET(int32_t);
    case reflection::UInt: return FLATBUFFERS_GET(uint32_t);
    case reflection::Long: return FLATBUFFERS_GET(int64_t);
    case reflection::ULong: return FLATBUFFERS_GET(uint64_t);
    case reflection::Float: return FLATBUFFERS_GET(float);
    case reflection::Double: return FLATBUFFERS_GET(double);
    case reflection::String: {
      auto s = reinterpret_cast<const String *>(ReadScalar<uoffset_t>(data) +
                                                data);
      return s ? StringToInt(s->c_str()) : 0;
    }
    default: return 0;  // Tables & vectors do not make sense.
  }
  #undef FLATBUFFERS_GET
}

double GetAnyValueF(reflection::BaseType type, const uint8_t *data) {
  switch (type) {
    case reflection::Float: return static_cast<double>(ReadScalar<float>(data));
    case reflection::Double: return ReadScalar<double>(data);
    case reflection::String: {
      auto s =
          reinterpret_cast<const String *>(ReadScalar<uoffset_t>(data) + data);
      return s ? strtod(s->c_str(), nullptr) : 0.0;
    }
    default: return static_cast<double>(GetAnyValueI(type, data));
  }
}

std::string GetAnyValueS(reflection::BaseType type, const uint8_t *data,
                         const reflection::Schema *schema, int type_index) {
  switch (type) {
    case reflection::Float:
    case reflection::Double: return NumToString(GetAnyValueF(type, data));
    case reflection::String: {
      auto s =
          reinterpret_cast<const String *>(ReadScalar<uoffset_t>(data) + data);
      return s ? s->c_str() : "";
    }
    case reflection::Obj:
      if (schema) {
        // Convert the table to a string. This is mostly for debugging purposes,
        // and does NOT promise to be JSON compliant.
        // Also prefixes the type.
        auto &objectdef = *schema->objects()->Get(type_index);
        auto s = objectdef.name()->str();
        if (objectdef.is_struct()) {
          s += "(struct)";  // TODO: implement this as well.
        } else {
          auto table_field = reinterpret_cast<const Table *>(
              ReadScalar<uoffset_t>(data) + data);
          s += " { ";
          auto fielddefs = objectdef.fields();
          for (auto it = fielddefs->begin(); it != fielddefs->end(); ++it) {
            auto &fielddef = **it;
            if (!table_field->CheckField(fielddef.offset())) continue;
            auto val = GetAnyFieldS(*table_field, fielddef, schema);
            if (fielddef.type()->base_type() == reflection::String) {
              std::string esc;
              flatbuffers::EscapeString(val.c_str(), val.length(), &esc, true,
                                        false);
              val = esc;
            }
            s += fielddef.name()->str();
            s += ": ";
            s += val;
            s += ", ";
          }
          s += "}";
        }
        return s;
      } else {
        return "(table)";
      }
    case reflection::Vector:
      return "[(elements)]";  // TODO: implement this as well.
    case reflection::Union: return "(union)";  // TODO: implement this as well.
    default: return NumToString(GetAnyValueI(type, data));
  }
}
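
// Illustrative sketch (not part of this file): the getters above are normally
// reached through the GetAnyField* helpers declared in reflection.h. The
// schema variable and the field name "hp" are assumptions for the example.
//
//   auto &schema = *reflection::GetSchema(bfbs.c_str());   // loaded .bfbs
//   auto root = flatbuffers::GetAnyRoot(buffer);           // untyped root
//   auto field = schema.root_table()->fields()->LookupByKey("hp");
//   auto as_int = flatbuffers::GetAnyFieldI(*root, *field);
//   auto as_str = flatbuffers::GetAnyFieldS(*root, *field, &schema);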

void SetAnyValueI(reflection::BaseType type, uint8_t *data, int64_t val) {
  #define FLATBUFFERS_SET(T) WriteScalar(data, static_cast<T>(val))
  switch (type) {
    case reflection::UType:
    case reflection::Bool:
    case reflection::UByte: FLATBUFFERS_SET(uint8_t ); break;
    case reflection::Byte: FLATBUFFERS_SET(int8_t ); break;
    case reflection::Short: FLATBUFFERS_SET(int16_t ); break;
    case reflection::UShort: FLATBUFFERS_SET(uint16_t); break;
    case reflection::Int: FLATBUFFERS_SET(int32_t ); break;
    case reflection::UInt: FLATBUFFERS_SET(uint32_t); break;
    case reflection::Long: FLATBUFFERS_SET(int64_t ); break;
    case reflection::ULong: FLATBUFFERS_SET(uint64_t); break;
    case reflection::Float: FLATBUFFERS_SET(float ); break;
    case reflection::Double: FLATBUFFERS_SET(double ); break;
    // TODO: support strings.
    default: break;
  }
  #undef FLATBUFFERS_SET
}

void SetAnyValueF(reflection::BaseType type, uint8_t *data, double val) {
  switch (type) {
    case reflection::Float: WriteScalar(data, static_cast<float>(val)); break;
    case reflection::Double: WriteScalar(data, val); break;
    // TODO: support strings.
    default: SetAnyValueI(type, data, static_cast<int64_t>(val)); break;
  }
}

void SetAnyValueS(reflection::BaseType type, uint8_t *data, const char *val) {
  switch (type) {
    case reflection::Float:
    case reflection::Double:
      SetAnyValueF(type, data, strtod(val, nullptr));
      break;
    // TODO: support strings.
    default: SetAnyValueI(type, data, StringToInt(val)); break;
  }
}
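
// Illustrative sketch (not part of this file): the setters above only mutate
// fixed-size values in place (strings are still a TODO). They are typically
// reached via the SetAnyFieldI/F/S helpers in reflection.h; the field name
// "hp" is an assumption for the example.
//
//   auto root = flatbuffers::GetAnyRoot(flatbuf.data());
//   auto field = schema.root_table()->fields()->LookupByKey("hp");
//   flatbuffers::SetAnyFieldI(root, *field, 42);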

// Resize a FlatBuffer in-place by iterating through all offsets in the buffer
// and adjusting them by "delta" if they straddle the start offset.
// Once that is done, bytes can now be inserted/deleted safely.
// "delta" may be negative (shrinking).
// Unless "delta" is a multiple of the largest alignment, you'll create a small
// amount of garbage space in the buffer (usually 0..7 bytes).
// If your FlatBuffer's root table is not the schema's root table, you should
// pass in your root_table type as well.
class ResizeContext {
 public:
  ResizeContext(const reflection::Schema &schema, uoffset_t start, int delta,
                std::vector<uint8_t> *flatbuf,
                const reflection::Object *root_table = nullptr)
      : schema_(schema),
        startptr_(vector_data(*flatbuf) + start),
        delta_(delta),
        buf_(*flatbuf),
        dag_check_(flatbuf->size() / sizeof(uoffset_t), false) {
    auto mask = static_cast<int>(sizeof(largest_scalar_t) - 1);
    delta_ = (delta_ + mask) & ~mask;
    if (!delta_) return;  // We can't shrink by less than largest_scalar_t.
    // Now change all the offsets by delta_.
    auto root = GetAnyRoot(vector_data(buf_));
    Straddle<uoffset_t, 1>(vector_data(buf_), root, vector_data(buf_));
    ResizeTable(root_table ? *root_table : *schema.root_table(), root);
    // We can now add or remove bytes at start.
    if (delta_ > 0)
      buf_.insert(buf_.begin() + start, delta_, 0);
    else
      buf_.erase(buf_.begin() + start, buf_.begin() + start - delta_);
  }

  // Check if the range between first (lower address) and second straddles
  // the insertion point. If it does, change the offset at offsetloc (of
  // type T, with direction D).
  template<typename T, int D>
  void Straddle(const void *first, const void *second, void *offsetloc) {
    if (first <= startptr_ && second >= startptr_) {
      WriteScalar<T>(offsetloc, ReadScalar<T>(offsetloc) + delta_ * D);
      DagCheck(offsetloc) = true;
    }
  }

  // This returns a boolean that records if the corresponding offset location
  // has been modified already. If so, we can't even read the corresponding
  // offset, since it is pointing to a location that is illegal until the
  // resize actually happens.
  // This must be checked for every offset, since we can't know which offsets
  // will straddle and which won't.
  uint8_t &DagCheck(const void *offsetloc) {
    auto dag_idx = reinterpret_cast<const uoffset_t *>(offsetloc) -
                   reinterpret_cast<const uoffset_t *>(vector_data(buf_));
    return dag_check_[dag_idx];
  }

  void ResizeTable(const reflection::Object &objectdef, Table *table) {
    if (DagCheck(table)) return;  // Table already visited.
    auto vtable = table->GetVTable();
    // Early out: since all fields inside the table must point forwards in
    // memory, if the insertion point is before the table we can stop here.
    auto tableloc = reinterpret_cast<uint8_t *>(table);
    if (startptr_ <= tableloc) {
      // Check if insertion point is between the table and a vtable that
      // precedes it. This can't happen in current construction code, but check
      // just in case we ever change the way flatbuffers are built.
      Straddle<soffset_t, -1>(vtable, table, table);
    } else {
      // Check each field.
      auto fielddefs = objectdef.fields();
      for (auto it = fielddefs->begin(); it != fielddefs->end(); ++it) {
        auto &fielddef = **it;
        auto base_type = fielddef.type()->base_type();
        // Ignore scalars.
        if (base_type <= reflection::Double) continue;
        // Ignore fields that are not stored.
        auto offset = table->GetOptionalFieldOffset(fielddef.offset());
        if (!offset) continue;
        // Ignore structs.
        auto subobjectdef =
            base_type == reflection::Obj
                ? schema_.objects()->Get(fielddef.type()->index())
                : nullptr;
        if (subobjectdef && subobjectdef->is_struct()) continue;
        // Get this field's offset, and read it if safe.
        auto offsetloc = tableloc + offset;
        if (DagCheck(offsetloc)) continue;  // This offset already visited.
        auto ref = offsetloc + ReadScalar<uoffset_t>(offsetloc);
        Straddle<uoffset_t, 1>(offsetloc, ref, offsetloc);
        switch (base_type) {
          case reflection::Obj: {
            ResizeTable(*subobjectdef, reinterpret_cast<Table *>(ref));
            break;
          }
          case reflection::Vector: {
            auto elem_type = fielddef.type()->element();
            if (elem_type != reflection::Obj && elem_type != reflection::String)
              break;
            auto vec = reinterpret_cast<Vector<uoffset_t> *>(ref);
            auto elemobjectdef =
                elem_type == reflection::Obj
                    ? schema_.objects()->Get(fielddef.type()->index())
                    : nullptr;
            if (elemobjectdef && elemobjectdef->is_struct()) break;
            for (uoffset_t i = 0; i < vec->size(); i++) {
              auto loc = vec->Data() + i * sizeof(uoffset_t);
              if (DagCheck(loc)) continue;  // This offset already visited.
              auto dest = loc + vec->Get(i);
              Straddle<uoffset_t, 1>(loc, dest, loc);
              if (elemobjectdef)
                ResizeTable(*elemobjectdef, reinterpret_cast<Table *>(dest));
            }
            break;
          }
          case reflection::Union: {
            ResizeTable(GetUnionType(schema_, objectdef, fielddef, *table),
                        reinterpret_cast<Table *>(ref));
            break;
          }
          case reflection::String: break;
          default: FLATBUFFERS_ASSERT(false);
        }
      }
      // Check if the vtable offset points beyond the insertion point.
      // Must do this last, since GetOptionalFieldOffset above still reads
      // this value.
      Straddle<soffset_t, -1>(table, vtable, table);
    }
  }

  void operator=(const ResizeContext &rc);

 private:
  const reflection::Schema &schema_;
  uint8_t *startptr_;
  int delta_;
  std::vector<uint8_t> &buf_;
  std::vector<uint8_t> dag_check_;
};
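
// Illustrative sketch (not part of this file): ResizeContext does all of its
// work in the constructor and is consumed by SetString / ResizeAnyVector
// below. For example, growing a buffer by 16 bytes at byte offset `start`
// while fixing up every offset that straddles the insertion point:
//
//   ResizeContext(schema, start, 16, &flatbuf, schema.root_table());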

void SetString(const reflection::Schema &schema, const std::string &val,
               const String *str, std::vector<uint8_t> *flatbuf,
               const reflection::Object *root_table) {
  auto delta = static_cast<int>(val.size()) - static_cast<int>(str->size());
  auto str_start = static_cast<uoffset_t>(
      reinterpret_cast<const uint8_t *>(str) - vector_data(*flatbuf));
  auto start = str_start + static_cast<uoffset_t>(sizeof(uoffset_t));
  if (delta) {
    // Clear the old string, since we don't want parts of it remaining.
    memset(vector_data(*flatbuf) + start, 0, str->size());
    // Different size, we must expand (or contract).
    ResizeContext(schema, start, delta, flatbuf, root_table);
    // Set the new length.
    WriteScalar(vector_data(*flatbuf) + str_start,
                static_cast<uoffset_t>(val.size()));
  }
  // Copy new data. Safe because we created the right amount of space.
  memcpy(vector_data(*flatbuf) + start, val.c_str(), val.size() + 1);
}
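
// Illustrative sketch (not part of this file): resizing a string field
// in-place. GetFieldS returns nullptr when the field is absent; the field
// name "name" is an assumption for the example.
//
//   auto root = flatbuffers::GetAnyRoot(flatbuf.data());
//   auto field = schema.root_table()->fields()->LookupByKey("name");
//   if (auto str = flatbuffers::GetFieldS(*root, *field))
//     flatbuffers::SetString(schema, "a longer value", str, &flatbuf);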

uint8_t *ResizeAnyVector(const reflection::Schema &schema, uoffset_t newsize,
                         const VectorOfAny *vec, uoffset_t num_elems,
                         uoffset_t elem_size, std::vector<uint8_t> *flatbuf,
                         const reflection::Object *root_table) {
  auto delta_elem = static_cast<int>(newsize) - static_cast<int>(num_elems);
  auto delta_bytes = delta_elem * static_cast<int>(elem_size);
  auto vec_start =
      reinterpret_cast<const uint8_t *>(vec) - vector_data(*flatbuf);
  auto start = static_cast<uoffset_t>(vec_start + sizeof(uoffset_t) +
                                      elem_size * num_elems);
  if (delta_bytes) {
    if (delta_elem < 0) {
      // Clear elements we're throwing away, since some might remain in the
      // buffer.
      auto size_clear = -delta_elem * elem_size;
      memset(vector_data(*flatbuf) + start - size_clear, 0, size_clear);
    }
    ResizeContext(schema, start, delta_bytes, flatbuf, root_table);
    WriteScalar(vector_data(*flatbuf) + vec_start, newsize);  // Length field.
    // Set new elements to 0; this can be overwritten by the caller.
    if (delta_elem > 0) {
      memset(vector_data(*flatbuf) + start, 0, delta_elem * elem_size);
    }
  }
  return vector_data(*flatbuf) + start;
}
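
// Illustrative sketch (not part of this file): growing an int32 vector to 10
// elements; `vec` and `num_elems` are assumed to describe an existing vector
// inside `flatbuf`. New slots are zeroed and can then be filled by the caller.
//
//   ResizeAnyVector(schema, 10, vec, num_elems, sizeof(int32_t), &flatbuf,
//                   schema.root_table());
//
// Callers can also use the typed ResizeVector<T>() wrapper declared in
// reflection.h rather than calling this directly.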

const uint8_t *AddFlatBuffer(std::vector<uint8_t> &flatbuf,
                             const uint8_t *newbuf, size_t newlen) {
  // Align to sizeof(uoffset_t) past sizeof(largest_scalar_t) since we're
  // going to chop off the root offset.
  while ((flatbuf.size() & (sizeof(uoffset_t) - 1)) ||
         !(flatbuf.size() & (sizeof(largest_scalar_t) - 1))) {
    flatbuf.push_back(0);
  }
  auto insertion_point = static_cast<uoffset_t>(flatbuf.size());
  // Insert the entire FlatBuffer minus the root pointer.
  flatbuf.insert(flatbuf.end(), newbuf + sizeof(uoffset_t), newbuf + newlen);
  auto root_offset = ReadScalar<uoffset_t>(newbuf) - sizeof(uoffset_t);
  return vector_data(flatbuf) + insertion_point + root_offset;
}
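
// Illustrative sketch (not part of this file): appending a finished nested
// FlatBuffer to an existing one, then pointing a table field at the copied
// root (the follow-up hookup step is an assumption for the example):
//
//   auto newroot = flatbuffers::AddFlatBuffer(flatbuf, nested_buf, nested_len);
//   // newroot now points at the copied root table inside flatbuf and can be
//   // referenced, e.g. via SetFieldT() on a table that precedes it.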

void CopyInline(FlatBufferBuilder &fbb, const reflection::Field &fielddef,
                const Table &table, size_t align, size_t size) {
  fbb.Align(align);
  fbb.PushBytes(table.GetStruct<const uint8_t *>(fielddef.offset()), size);
  fbb.TrackField(fielddef.offset(), fbb.GetSize());
}

Offset<const Table *> CopyTable(FlatBufferBuilder &fbb,
                                const reflection::Schema &schema,
                                const reflection::Object &objectdef,
                                const Table &table, bool use_string_pooling) {
  // Before we can construct the table, we have to first generate any
  // subobjects, and collect their offsets.
  std::vector<uoffset_t> offsets;
  auto fielddefs = objectdef.fields();
  for (auto it = fielddefs->begin(); it != fielddefs->end(); ++it) {
    auto &fielddef = **it;
    // Skip if field is not present in the source.
    if (!table.CheckField(fielddef.offset())) continue;
    uoffset_t offset = 0;
    switch (fielddef.type()->base_type()) {
      case reflection::String: {
        offset = use_string_pooling
                     ? fbb.CreateSharedString(GetFieldS(table, fielddef)).o
                     : fbb.CreateString(GetFieldS(table, fielddef)).o;
        break;
      }
      case reflection::Obj: {
        auto &subobjectdef = *schema.objects()->Get(fielddef.type()->index());
        if (!subobjectdef.is_struct()) {
          offset =
              CopyTable(fbb, schema, subobjectdef, *GetFieldT(table, fielddef))
                  .o;
        }
        break;
      }
      case reflection::Union: {
        auto &subobjectdef = GetUnionType(schema, objectdef, fielddef, table);
        offset =
            CopyTable(fbb, schema, subobjectdef, *GetFieldT(table, fielddef)).o;
        break;
      }
      case reflection::Vector: {
        auto vec =
            table.GetPointer<const Vector<Offset<Table>> *>(fielddef.offset());
        auto element_base_type = fielddef.type()->element();
        auto elemobjectdef =
            element_base_type == reflection::Obj
                ? schema.objects()->Get(fielddef.type()->index())
                : nullptr;
        switch (element_base_type) {
          case reflection::String: {
            std::vector<Offset<const String *>> elements(vec->size());
            auto vec_s = reinterpret_cast<const Vector<Offset<String>> *>(vec);
            for (uoffset_t i = 0; i < vec_s->size(); i++) {
              elements[i] = use_string_pooling
                                ? fbb.CreateSharedString(vec_s->Get(i)).o
                                : fbb.CreateString(vec_s->Get(i)).o;
            }
            offset = fbb.CreateVector(elements).o;
            break;
          }
          case reflection::Obj: {
            if (!elemobjectdef->is_struct()) {
              std::vector<Offset<const Table *>> elements(vec->size());
              for (uoffset_t i = 0; i < vec->size(); i++) {
                elements[i] =
                    CopyTable(fbb, schema, *elemobjectdef, *vec->Get(i));
              }
              offset = fbb.CreateVector(elements).o;
              break;
            }
          }
            FLATBUFFERS_FALLTHROUGH();  // fall thru
          default: {  // Scalars and structs.
            auto element_size = GetTypeSize(element_base_type);
            if (elemobjectdef && elemobjectdef->is_struct())
              element_size = elemobjectdef->bytesize();
            fbb.StartVector(vec->size(), element_size);
            fbb.PushBytes(vec->Data(), element_size * vec->size());
            offset = fbb.EndVector(vec->size());
            break;
          }
        }
        break;
      }
      default:  // Scalars.
        break;
    }
    if (offset) { offsets.push_back(offset); }
  }
  // Now we can build the actual table from either offsets or scalar data.
  auto start = objectdef.is_struct() ? fbb.StartStruct(objectdef.minalign())
                                     : fbb.StartTable();
  size_t offset_idx = 0;
  for (auto it = fielddefs->begin(); it != fielddefs->end(); ++it) {
    auto &fielddef = **it;
    if (!table.CheckField(fielddef.offset())) continue;
    auto base_type = fielddef.type()->base_type();
    switch (base_type) {
      case reflection::Obj: {
        auto &subobjectdef = *schema.objects()->Get(fielddef.type()->index());
        if (subobjectdef.is_struct()) {
          CopyInline(fbb, fielddef, table, subobjectdef.minalign(),
                     subobjectdef.bytesize());
          break;
        }
      }
        FLATBUFFERS_FALLTHROUGH();  // fall thru
      case reflection::Union:
      case reflection::String:
      case reflection::Vector:
        fbb.AddOffset(fielddef.offset(), Offset<void>(offsets[offset_idx++]));
        break;
      default: {  // Scalars.
        auto size = GetTypeSize(base_type);
        CopyInline(fbb, fielddef, table, size, size);
        break;
      }
    }
  }
  FLATBUFFERS_ASSERT(offset_idx == offsets.size());
  if (objectdef.is_struct()) {
    fbb.ClearOffsets();
    return fbb.EndStruct();
  }
  return fbb.EndTable(start);
}
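
// Illustrative sketch (not part of this file): deep-copying the root table of
// an existing buffer into a fresh builder, e.g. to compact a buffer after many
// in-place mutations:
//
//   flatbuffers::FlatBufferBuilder fbb;
//   auto root = flatbuffers::GetAnyRoot(buffer);
//   fbb.Finish(CopyTable(fbb, schema, *schema.root_table(), *root));
//   // fbb.GetBufferPointer() / fbb.GetSize() now hold the copied buffer.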

bool VerifyStruct(flatbuffers::Verifier &v,
                  const flatbuffers::Table &parent_table,
                  voffset_t field_offset, const reflection::Object &obj,
                  bool required) {
  auto offset = parent_table.GetOptionalFieldOffset(field_offset);
  if (required && !offset) { return false; }

  return !offset ||
         v.Verify(reinterpret_cast<const uint8_t *>(&parent_table), offset,
                  obj.bytesize());
}

bool VerifyVectorOfStructs(flatbuffers::Verifier &v,
                           const flatbuffers::Table &parent_table,
                           voffset_t field_offset,
                           const reflection::Object &obj, bool required) {
  auto p = parent_table.GetPointer<const uint8_t *>(field_offset);
  if (required && !p) { return false; }

  return !p || v.VerifyVectorOrString(p, obj.bytesize());
}

// forward declare to resolve cyclic deps between VerifyObject and VerifyVector
bool VerifyObject(flatbuffers::Verifier &v, const reflection::Schema &schema,
                  const reflection::Object &obj,
                  const flatbuffers::Table *table, bool required);

bool VerifyUnion(flatbuffers::Verifier &v, const reflection::Schema &schema,
                 uint8_t utype, const uint8_t *elem,
                 const reflection::Field &union_field) {
  if (!utype) return true;  // Not present.
  auto fb_enum = schema.enums()->Get(union_field.type()->index());
  if (utype >= fb_enum->values()->size()) return false;
  auto elem_type = fb_enum->values()->Get(utype)->union_type();
  switch (elem_type->base_type()) {
    case reflection::Obj: {
      auto elem_obj = schema.objects()->Get(elem_type->index());
      if (elem_obj->is_struct()) {
        return v.VerifyFromPointer(elem, elem_obj->bytesize());
      } else {
        return VerifyObject(v, schema, *elem_obj,
                            reinterpret_cast<const flatbuffers::Table *>(elem),
                            true);
      }
    }
    case reflection::String:
      return v.VerifyString(
          reinterpret_cast<const flatbuffers::String *>(elem));
    default: return false;
  }
}

bool VerifyVector(flatbuffers::Verifier &v, const reflection::Schema &schema,
                  const flatbuffers::Table &table,
                  const reflection::Field &vec_field) {
  FLATBUFFERS_ASSERT(vec_field.type()->base_type() == reflection::Vector);
  if (!table.VerifyField<uoffset_t>(v, vec_field.offset())) return false;

  switch (vec_field.type()->element()) {
    case reflection::UType:
      return v.VerifyVector(flatbuffers::GetFieldV<uint8_t>(table, vec_field));
    case reflection::Bool:
    case reflection::Byte:
    case reflection::UByte:
      return v.VerifyVector(flatbuffers::GetFieldV<int8_t>(table, vec_field));
    case reflection::Short:
    case reflection::UShort:
      return v.VerifyVector(flatbuffers::GetFieldV<int16_t>(table, vec_field));
    case reflection::Int:
    case reflection::UInt:
      return v.VerifyVector(flatbuffers::GetFieldV<int32_t>(table, vec_field));
    case reflection::Long:
    case reflection::ULong:
      return v.VerifyVector(flatbuffers::GetFieldV<int64_t>(table, vec_field));
    case reflection::Float:
      return v.VerifyVector(flatbuffers::GetFieldV<float>(table, vec_field));
    case reflection::Double:
      return v.VerifyVector(flatbuffers::GetFieldV<double>(table, vec_field));
    case reflection::String: {
      auto vec_string =
          flatbuffers::GetFieldV<flatbuffers::Offset<flatbuffers::String>>(
              table, vec_field);
      if (v.VerifyVector(vec_string) && v.VerifyVectorOfStrings(vec_string)) {
        return true;
      } else {
        return false;
      }
    }
    case reflection::Obj: {
      auto obj = schema.objects()->Get(vec_field.type()->index());
      if (obj->is_struct()) {
        return VerifyVectorOfStructs(v, table, vec_field.offset(), *obj,
                                     vec_field.required());
      } else {
        auto vec =
            flatbuffers::GetFieldV<flatbuffers::Offset<flatbuffers::Table>>(
                table, vec_field);
        if (!v.VerifyVector(vec)) return false;
        if (!vec) return true;
        for (uoffset_t j = 0; j < vec->size(); j++) {
          if (!VerifyObject(v, schema, *obj, vec->Get(j), true)) {
            return false;
          }
        }
        return true;
      }
    }
    case reflection::Union: {
      auto vec = flatbuffers::GetFieldV<flatbuffers::Offset<uint8_t>>(
          table, vec_field);
      if (!v.VerifyVector(vec)) return false;
      if (!vec) return true;
      auto type_vec = table.GetPointer<Vector<uint8_t> *>(vec_field.offset() -
                                                          sizeof(voffset_t));
      if (!v.VerifyVector(type_vec)) return false;
      for (uoffset_t j = 0; j < vec->size(); j++) {
        // Get the union type from the previous field.
        auto utype = type_vec->Get(j);
        auto elem = vec->Get(j);
        if (!VerifyUnion(v, schema, utype, elem, vec_field)) return false;
      }
      return true;
    }
    case reflection::Vector:
    case reflection::None:
    default:
      FLATBUFFERS_ASSERT(false);
      return false;
  }
}

bool VerifyObject(flatbuffers::Verifier &v, const reflection::Schema &schema,
                  const reflection::Object &obj,
                  const flatbuffers::Table *table, bool required) {
  if (!table) return !required;
  if (!table->VerifyTableStart(v)) return false;
  for (uoffset_t i = 0; i < obj.fields()->size(); i++) {
    auto field_def = obj.fields()->Get(i);
    switch (field_def->type()->base_type()) {
      case reflection::None: FLATBUFFERS_ASSERT(false); break;
      case reflection::UType:
        if (!table->VerifyField<uint8_t>(v, field_def->offset())) return false;
        break;
      case reflection::Bool:
      case reflection::Byte:
      case reflection::UByte:
        if (!table->VerifyField<int8_t>(v, field_def->offset())) return false;
        break;
      case reflection::Short:
      case reflection::UShort:
        if (!table->VerifyField<int16_t>(v, field_def->offset())) return false;
        break;
      case reflection::Int:
      case reflection::UInt:
        if (!table->VerifyField<int32_t>(v, field_def->offset())) return false;
        break;
      case reflection::Long:
      case reflection::ULong:
        if (!table->VerifyField<int64_t>(v, field_def->offset())) return false;
        break;
      case reflection::Float:
        if (!table->VerifyField<float>(v, field_def->offset())) return false;
        break;
      case reflection::Double:
        if (!table->VerifyField<double>(v, field_def->offset())) return false;
        break;
      case reflection::String:
        if (!table->VerifyField<uoffset_t>(v, field_def->offset()) ||
            !v.VerifyString(flatbuffers::GetFieldS(*table, *field_def))) {
          return false;
        }
        break;
      case reflection::Vector:
        if (!VerifyVector(v, schema, *table, *field_def)) return false;
        break;
      case reflection::Obj: {
        auto child_obj = schema.objects()->Get(field_def->type()->index());
        if (child_obj->is_struct()) {
          if (!VerifyStruct(v, *table, field_def->offset(), *child_obj,
                            field_def->required())) {
            return false;
          }
        } else {
          if (!VerifyObject(v, schema, *child_obj,
                            flatbuffers::GetFieldT(*table, *field_def),
                            field_def->required())) {
            return false;
          }
        }
        break;
      }
      case reflection::Union: {
        // Get the union type from the previous field.
        voffset_t utype_offset = field_def->offset() - sizeof(voffset_t);
        auto utype = table->GetField<uint8_t>(utype_offset, 0);
        auto uval = reinterpret_cast<const uint8_t *>(
            flatbuffers::GetFieldT(*table, *field_def));
        if (!VerifyUnion(v, schema, utype, uval, *field_def)) {
          return false;
        }
        break;
      }
      default:
        FLATBUFFERS_ASSERT(false);
        break;
    }
  }

  if (!v.EndTable()) return false;

  return true;
}

bool Verify(const reflection::Schema &schema, const reflection::Object &root,
            const uint8_t *buf, size_t length) {
  Verifier v(buf, length);
  return VerifyObject(v, schema, root, flatbuffers::GetAnyRoot(buf), true);
}
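
// Illustrative sketch (not part of this file): verifying an untrusted buffer
// against a schema loaded from a binary .bfbs file (the file name is an
// assumption for the example):
//
//   std::string bfbs;
//   flatbuffers::LoadFile("monster.bfbs", true, &bfbs);
//   auto &schema = *reflection::GetSchema(bfbs.c_str());
//   bool ok = flatbuffers::Verify(schema, *schema.root_table(), buf, len);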

}  // namespace flatbuffers