#ifndef FLATBUFFERS_H_
#define FLATBUFFERS_H_

template<typename T> FLATBUFFERS_CONSTEXPR size_t AlignOf() {
  return __alignof__(T);
}
  // IndirectHelper::Read() for scalar elements stored inline.
  static const size_t element_stride = sizeof(T);
  static return_type Read(const uint8_t *p, uoffset_t i) {
    return EndianScalar((reinterpret_cast<const T *>(p))[i]);
  }

  // IndirectHelper::Read() for Offset<T> elements: follow the stored offset.
  static const size_t element_stride = sizeof(uoffset_t);
  static return_type Read(const uint8_t *p, uoffset_t i) {
    p += i * sizeof(uoffset_t);
    return reinterpret_cast<return_type>(p + ReadScalar<uoffset_t>(p));
  }

  // IndirectHelper::Read() for structs stored inline (const T *).
  static const size_t element_stride = sizeof(T);
  static return_type Read(const uint8_t *p, uoffset_t i) {
    return reinterpret_cast<const T *>(p + i * sizeof(T));
  }
  bool operator==(const VectorIterator &other) const {
    return data_ == other.data_;
  }
  bool operator<(const VectorIterator &other) const {
    return data_ < other.data_;
  }
  bool operator!=(const VectorIterator &other) const {
    return data_ != other.data_;
  }
  uoffset_t size() const { return EndianScalar(length_); }

  // Synonym for size(), kept for backwards compatibility.
  uoffset_t Length() const { return size(); }

  return_type Get(uoffset_t i) const {
    // ...
  }

  // If this is a Vector of enums, this casts the stored value to the enum
  // type E.
  template<typename E> E GetEnum(uoffset_t i) const {
    return static_cast<E>(Get(i));
  }

  // If this is a vector of unions, this does the cast for you; there is no
  // check that U is the right type.
  template<typename U> const U *GetAs(uoffset_t i) const {
    return reinterpret_cast<const U *>(Get(i));
  }

  const String *GetAsString(uoffset_t i) const {
    return reinterpret_cast<const String *>(Get(i));
  }

  const void *GetStructFromOffset(size_t o) const {
    return reinterpret_cast<const void *>(Data() + o);
  }
  // Change a scalar element in place; requires a non-const pointer to this
  // object.
  void Mutate(uoffset_t i, const T &val) {
    WriteScalar(data() + i, val);
  }

  // Change an element of a vector of tables (or strings); "val" points to the
  // new table/string inside the same buffer.
  void MutateOffset(uoffset_t i, const uint8_t *val) {
    static_assert(sizeof(T) == sizeof(uoffset_t), "Unrelated types");
    WriteScalar(data() + i,
                static_cast<uoffset_t>(val - (Data() + i * sizeof(uoffset_t))));
  }

  // The raw data in little endian format. Use with care.
  const uint8_t *Data() const {
    return reinterpret_cast<const uint8_t *>(&length_ + 1);
  }
  uint8_t *Data() { return reinterpret_cast<uint8_t *>(&length_ + 1); }

  // Similarly, but typed, much like std::vector::data().
  const T *data() const { return reinterpret_cast<const T *>(Data()); }
  T *data() { return reinterpret_cast<T *>(Data()); }
  template<typename K> return_type LookupByKey(K key) const {
    void *search_result = std::bsearch(
        &key, Data(), size(), IndirectHelper<T>::element_stride, KeyCompare<K>);
    if (!search_result) {
      return nullptr;  // Key not found.
    }
    const uint8_t *element = reinterpret_cast<const uint8_t *>(search_result);
    return IndirectHelper<T>::Read(element, 0);
  }

  template<typename K> static int KeyCompare(const void *ap, const void *bp) {
    const K *key = reinterpret_cast<const K *>(ap);
    const uint8_t *data = reinterpret_cast<const uint8_t *>(bp);
    auto table = IndirectHelper<T>::Read(data, 0);
    // std::bsearch compares with the operands transposed, so we negate the
    // result here.
    return -table->KeyCompareWithValue(*key);
  }
  // VectorOfAny: the same accessors with the element type erased (used by
  // reflection).
  uoffset_t size() const { return EndianScalar(length_); }

  const uint8_t *Data() const {
    return reinterpret_cast<const uint8_t *>(&length_ + 1);
  }
  uint8_t *Data() { return reinterpret_cast<uint8_t *>(&length_ + 1); }
#ifndef FLATBUFFERS_CPP98_STL
template<typename T, typename U>
Vector<Offset<T>> *VectorCast(Vector<Offset<U>> *ptr) {
  static_assert(std::is_base_of<T, U>::value, "Unrelated types");
  return reinterpret_cast<Vector<Offset<T>> *>(ptr);
}

template<typename T, typename U>
const Vector<Offset<T>> *VectorCast(const Vector<Offset<U>> *ptr) {
  static_assert(std::is_base_of<T, U>::value, "Unrelated types");
  return reinterpret_cast<const Vector<Offset<T>> *>(ptr);
}
#endif

// Convenient helper to get the length of any vector, regardless of whether it
// is null (field not set) or not.
template<typename T> static inline size_t VectorLength(const Vector<T> *v) {
  return v ? v->Length() : 0;
}
  // String is a Vector<char> with convenience accessors.
  const char *c_str() const { return reinterpret_cast<const char *>(Data()); }
  std::string str() const { return std::string(c_str(), Length()); }

  #ifdef FLATBUFFERS_HAS_STRING_VIEW
  flatbuffers::string_view string_view() const {
    return flatbuffers::string_view(c_str(), Length());
  }
  #endif  // FLATBUFFERS_HAS_STRING_VIEW

  bool operator<(const String &o) const {
    return strcmp(c_str(), o.c_str()) < 0;
  }
  // Allocate `size` bytes of memory.
  virtual uint8_t *allocate(size_t size) = 0;

  // Deallocate `size` bytes of memory at `p` allocated by this allocator.
  virtual void deallocate(uint8_t *p, size_t size) = 0;

  // Reallocate `new_size` bytes, replacing the old region of `old_size` bytes
  // at `old_p`. The buffer grows downward, so only `in_use_back` bytes at the
  // back and `in_use_front` bytes at the front are live and must be copied.
  virtual uint8_t *reallocate_downward(uint8_t *old_p, size_t old_size,
                                       size_t new_size, size_t in_use_back,
                                       size_t in_use_front) {
    uint8_t *new_p = allocate(new_size);
    memcpy_downward(old_p, old_size, new_p, new_size, in_use_back,
                    in_use_front);
    deallocate(old_p, old_size);
    return new_p;
  }

  // Copy the live front and back regions of the old allocation into the new,
  // larger one, keeping the back region anchored at the end of the buffer.
  void memcpy_downward(uint8_t *old_p, size_t old_size, uint8_t *new_p,
                       size_t new_size, size_t in_use_back,
                       size_t in_use_front) {
    memcpy(new_p + new_size - in_use_back, old_p + old_size - in_use_back,
           in_use_back);
    memcpy(new_p, old_p, in_use_front);
  }

  // DefaultAllocator uses new/delete to allocate memory regions.
  virtual uint8_t *allocate(size_t size) FLATBUFFERS_OVERRIDE {
    return new uint8_t[size];
  }

  virtual void deallocate(uint8_t *p, size_t) FLATBUFFERS_OVERRIDE {
    delete[] p;
  }
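The Allocator interface above can be subclassed to control how the builder obtains memory. Below is a minimal sketch of wiring a custom allocator into FlatBufferBuilder; the TracingAllocator type and its printouts are made up for illustration and simply mirror what DefaultAllocator does.

#include <cstdio>
#include "flatbuffers/flatbuffers.h"

// Counts allocations, otherwise defers to plain new/delete like
// DefaultAllocator does.
class TracingAllocator : public flatbuffers::Allocator {
 public:
  uint8_t *allocate(size_t size) override {
    std::printf("allocate %zu bytes\n", size);
    return new uint8_t[size];
  }
  void deallocate(uint8_t *p, size_t size) override {
    std::printf("deallocate %zu bytes\n", size);
    delete[] p;
  }
};

int main() {
  TracingAllocator alloc;
  // own_allocator = false: the builder does not take ownership, so `alloc`
  // must outlive it.
  flatbuffers::FlatBufferBuilder fbb(1024, &alloc, false);
  fbb.CreateString("hello");  // Triggers the first allocation.
  return 0;
}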
  // DetachedBuffer: a finished flatbuffer memory region, detached from its
  // builder, together with the allocator that owns it.
  DetachedBuffer()
      : allocator_(nullptr),
        own_allocator_(false),
        // ...

  DetachedBuffer(Allocator *allocator, bool own_allocator, uint8_t *buf,
                 size_t reserved, uint8_t *cur, size_t sz)
      : allocator_(allocator),
        own_allocator_(own_allocator),
        // ...

  DetachedBuffer(DetachedBuffer &&other)
      : allocator_(other.allocator_),
        own_allocator_(other.own_allocator_),
        reserved_(other.reserved_),
        // ...

  DetachedBuffer &operator=(DetachedBuffer &&other) {
    allocator_ = other.allocator_;
    own_allocator_ = other.own_allocator_;
    reserved_ = other.reserved_;
    // ...
  }

  const uint8_t *data() const { return cur_; }
  uint8_t *data() { return cur_; }
  size_t size() const { return size_; }

  #if 0  // disabled for now due to the ordering of classes in this header
  bool Verify() const {
    Verifier verifier(data(), size());
    return verifier.Verify<T>(nullptr);
  }

  const T *GetRoot() const {
    return flatbuffers::GetRoot<T>(data());
  }

  T *GetRoot() {
    return flatbuffers::GetRoot<T>(data());
  }
  #endif

  // Copy construction/assignment are disabled.
  FLATBUFFERS_DELETE_FUNC(
      DetachedBuffer &operator=(const DetachedBuffer &other))

  inline void destroy() {
    if (buf_) { allocator_->deallocate(buf_, reserved_); }
    if (own_allocator_ && allocator_) { delete allocator_; }
    // ...
  }

  inline void reset() {
    allocator_ = nullptr;
    own_allocator_ = false;
    // ...
  }
  // vector_downward: grows from high to low addresses, matching how
  // FlatBuffers are built back to front; the unused low end doubles as a
  // scratch pad.
  explicit vector_downward(size_t initial_size, Allocator *allocator,
                           bool own_allocator, size_t buffer_minalign)
      : allocator_(allocator),
        own_allocator_(own_allocator),
        initial_size_(initial_size),
        buffer_minalign_(buffer_minalign),
        // ...

  ~vector_downward() {
    if (buf_) { allocator_->deallocate(buf_, reserved_); }
    if (own_allocator_ && allocator_) { delete allocator_; }
  }

  void reset() {
    if (buf_) {
      allocator_->deallocate(buf_, reserved_);
      // ...
    }
  }

  void clear() {
    if (buf_) {
      cur_ = buf_ + reserved_;
    }
    // ...
  }

  // Relinquish the buffer to the caller.
  DetachedBuffer release() {
    DetachedBuffer fb(allocator_, own_allocator_, buf_, reserved_, cur_,
                      size());
    allocator_ = nullptr;
    own_allocator_ = false;
    // ...
    return fb;
  }

  size_t ensure_space(size_t len) {
    if (len > static_cast<size_t>(cur_ - scratch_)) { reallocate(len); }
    return len;
  }

  uint8_t *make_space(size_t len) {
    cur_ -= ensure_space(len);
    return cur_;
  }

  uoffset_t size() const {
    return static_cast<uoffset_t>(reserved_ - (cur_ - buf_));
  }

  uoffset_t scratch_size() const {
    return static_cast<uoffset_t>(scratch_ - buf_);
  }
  uint8_t *data_at(size_t offset) const { return buf_ + reserved_ - offset; }

  void push(const uint8_t *bytes, size_t num) {
    memcpy(make_space(num), bytes, num);
  }

  // Specialized version of push() that avoids a memcpy call for small data.
  template<typename T> void push_small(const T &little_endian_t) {
    make_space(sizeof(T));
    *reinterpret_cast<T *>(cur_) = little_endian_t;
  }

  template<typename T> void scratch_push_small(const T &t) {
    ensure_space(sizeof(T));
    *reinterpret_cast<T *>(scratch_) = t;
    scratch_ += sizeof(T);
  }

  // fill() is most frequently called with small byte counts (<= 4), so a
  // simple loop is used instead of memset.
  void fill(size_t zero_pad_bytes) {
    make_space(zero_pad_bytes);
    for (size_t i = 0; i < zero_pad_bytes; i++) cur_[i] = 0;
  }

  // Version for when we know the size is larger.
  void fill_big(size_t zero_pad_bytes) {
    memset(make_space(zero_pad_bytes), 0, zero_pad_bytes);
  }

  void pop(size_t bytes_to_remove) { cur_ += bytes_to_remove; }
  void scratch_pop(size_t bytes_to_remove) { scratch_ -= bytes_to_remove; }
  size_t initial_size_;
  size_t buffer_minalign_;

  void reallocate(size_t len) {
    auto old_reserved = reserved_;
    auto old_size = size();
    auto old_scratch_size = scratch_size();
    // Grow by at least 50% (or the initial size), then round the new capacity
    // up to the minimum alignment.
    reserved_ += (std::max)(len,
                            old_reserved ? old_reserved / 2 : initial_size_);
    reserved_ = (reserved_ + buffer_minalign_ - 1) & ~(buffer_minalign_ - 1);
    if (buf_) {
      buf_ = allocator_->reallocate_downward(buf_, old_reserved, reserved_,
                                             old_size, old_scratch_size);
    } else {
      buf_ = allocator_->allocate(reserved_);
    }
    cur_ = buf_ + reserved_ - old_size;
    scratch_ = buf_ + old_scratch_size;
  }
// Converts a field ID into a vtable offset.
inline voffset_t FieldIndexToOffset(voffset_t field_id) {
  // Should correspond to what EndTable() below builds up.
  const int fixed_fields = 2;  // Vtable size and Object Size.
  return static_cast<voffset_t>((field_id + fixed_fields) * sizeof(voffset_t));
}

template<typename T, typename Alloc>
const T *data(const std::vector<T, Alloc> &v) {
  return v.empty() ? nullptr : &v.front();
}
template<typename T, typename Alloc> T *data(std::vector<T, Alloc> &v) {
  return v.empty() ? nullptr : &v.front();
}
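Since FieldIndexToOffset() is plain arithmetic, a couple of spot checks make the vtable layout concrete. A small sketch assuming the standard 16-bit voffset_t:

#include <cassert>
#include "flatbuffers/flatbuffers.h"

int main() {
  // The first two vtable slots hold the vtable size and the table size
  // (2 bytes each with a 16-bit voffset_t), so field 0 starts at byte 4 and
  // field 1 at byte 6.
  assert(flatbuffers::FieldIndexToOffset(0) == 4);
  assert(flatbuffers::FieldIndexToOffset(1) == 6);
  return 0;
}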
  /// @brief Default constructor for FlatBufferBuilder.
  explicit FlatBufferBuilder(size_t initial_size = 1024,
                             Allocator *allocator = nullptr,
                             bool own_allocator = false,
                             size_t buffer_minalign =
                                 AlignOf<largest_scalar_t>())
      : buf_(initial_size, allocator, own_allocator, buffer_minalign),
        // ...
        force_defaults_(false),
        dedup_vtables_(true),
        string_pool(nullptr) {
    // ...
  }

  ~FlatBufferBuilder() {
    if (string_pool) delete string_pool;
  }

  /// @brief Reset all the state in this FlatBufferBuilder so it can be reused
  /// to construct another buffer.
  void Clear() {
    // ...
    if (string_pool) string_pool->clear();
  }

  /// @brief The current size of the serialized buffer, counting from the end.
  uoffset_t GetSize() const { return buf_.size(); }

  /// @brief Get the released pointer to the serialized buffer.
  DetachedBuffer ReleaseBufferPointer() {
    return buf_.release();
  }

  /// @brief Get the released DetachedBuffer.
  DetachedBuffer Release() {
    return buf_.release();
  }
  // Asserts that the buffer has been finished (Finish() was called).
  void Finished() const {
    // ...
  }

  void Pad(size_t num_bytes) { buf_.fill(num_bytes); }

  void TrackMinAlign(size_t elem_size) {
    if (elem_size > minalign_) minalign_ = elem_size;
  }

  void Align(size_t elem_size) {
    TrackMinAlign(elem_size);
    buf_.fill(PaddingBytes(buf_.size(), elem_size));
  }

  void PushFlatBuffer(const uint8_t *bytes, size_t size) {
    PushBytes(bytes, size);
    // ...
  }

  void PushBytes(const uint8_t *bytes, size_t size) { buf_.push(bytes, size); }

  void PopBytes(size_t amount) { buf_.pop(amount); }

  template<typename T> void AssertScalarT() {
    // ...
  }
  template<typename T> uoffset_t PushElement(T element) {
    T little_endian_element = EndianScalar(element);
    buf_.push_small(little_endian_element);
    return GetSize();
  }

  template<typename T> uoffset_t PushElement(Offset<T> off) {
    // Special case for offsets: see ReferTo below.
    return PushElement(ReferTo(off.o));
  }

  // When writing fields, we track where they are, so we can create correct
  // vtables later.
  void TrackField(voffset_t field, uoffset_t off) {
    FieldLoc fl = { off, field };
    buf_.scratch_push_small(fl);
    max_voffset_ = (std::max)(max_voffset_, field);
  }
  template<typename T> void AddElement(voffset_t field, T e, T def) {
    // We don't serialize values equal to the default.
    if (e == def && !force_defaults_) return;
    auto off = PushElement(e);
    TrackField(field, off);
  }

  template<typename T> void AddOffset(voffset_t field, Offset<T> off) {
    AddElement(field, ReferTo(off.o), static_cast<uoffset_t>(0));
  }

  template<typename T> void AddStruct(voffset_t field, const T *structptr) {
    if (!structptr) return;  // Default, don't store.
    buf_.push_small(*structptr);
    TrackField(field, GetSize());
  }

  void AddStructOffset(voffset_t field, uoffset_t off) {
    TrackField(field, off);
  }

  // Offsets initially are relative to the end of the buffer (downwards).
  // This converts them to be relative to the current location in the buffer
  // (when stored here), pointing upwards.
  uoffset_t ReferTo(uoffset_t off) {
    // Align to ensure GetSize() below is correct.
    Align(sizeof(uoffset_t));
    return GetSize() - off + static_cast<uoffset_t>(sizeof(uoffset_t));
  }
  // From generated code (or from the parser), we call StartTable/EndTable
  // with a sequence of AddElement calls in between.
  uoffset_t StartTable() {
    // ...
  }

  // This finishes one serialized object by generating the vtable, comparing
  // it against existing vtables, and writing the resulting vtable offset.
  uoffset_t EndTable(uoffset_t start) {
    // Write the vtable offset, which is the start of any table.
    auto vtableoffsetloc = PushElement<soffset_t>(0);
    // Write a vtable, which consists entirely of voffset_t elements. Include
    // space for the last offset and ensure empty tables have a minimum size.
    max_voffset_ =
        (std::max)(static_cast<voffset_t>(max_voffset_ + sizeof(voffset_t)),
                   FieldIndexToOffset(0));
    buf_.fill_big(max_voffset_);
    auto table_object_size = vtableoffsetloc - start;
    WriteScalar<voffset_t>(buf_.data() + sizeof(voffset_t),
                           static_cast<voffset_t>(table_object_size));
    WriteScalar<voffset_t>(buf_.data(), max_voffset_);
    // Write the tracked field offsets into the vtable.
    for (auto it = buf_.scratch_end() - num_field_loc * sizeof(FieldLoc);
         it < buf_.scratch_end(); it += sizeof(FieldLoc)) {
      auto field_location = reinterpret_cast<FieldLoc *>(it);
      auto pos = static_cast<voffset_t>(vtableoffsetloc - field_location->off);
      // If this asserts, it means you've set a field twice.
      FLATBUFFERS_ASSERT(
          !ReadScalar<voffset_t>(buf_.data() + field_location->id));
      WriteScalar<voffset_t>(buf_.data() + field_location->id, pos);
    }
    auto vt1 = reinterpret_cast<voffset_t *>(buf_.data());
    auto vt1_size = ReadScalar<voffset_t>(vt1);
    auto vt_use = GetSize();
    // See if we already generated a vtable with these exact contents; if so,
    // re-use it and pop the copy just written.
    if (dedup_vtables_) {
      for (auto it = buf_.scratch_data(); it < buf_.scratch_end();
           it += sizeof(uoffset_t)) {
        auto vt_offset_ptr = reinterpret_cast<uoffset_t *>(it);
        auto vt2 = reinterpret_cast<voffset_t *>(buf_.data_at(*vt_offset_ptr));
        auto vt2_size = *vt2;
        if (vt1_size != vt2_size || memcmp(vt2, vt1, vt1_size)) continue;
        vt_use = *vt_offset_ptr;
        buf_.pop(GetSize() - vtableoffsetloc);
        break;
      }
    }
    // If this is a new vtable, remember it.
    if (vt_use == GetSize()) { buf_.scratch_push_small(vt_use); }
    // Fill in the vtable offset we reserved above; it points from the start
    // of the object to where its vtable is stored.
    WriteScalar(buf_.data_at(vtableoffsetloc),
                static_cast<soffset_t>(vt_use) -
                    static_cast<soffset_t>(vtableoffsetloc));
    return vtableoffsetloc;
  }

  // Deprecated overload: the second argument (number of fields) is unused.
  uoffset_t EndTable(uoffset_t start, voffset_t /*numfields*/) {
    return EndTable(start);
  }
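StartTable()/AddElement()/EndTable() are normally driven by flatc-generated code. The sketch below drives them by hand for a hypothetical one-field table (a single int32 at field id 0, a made-up layout) so the vtable machinery above can be exercised end to end and read back through the generic Table accessor.

#include <cassert>
#include <cstdint>
#include "flatbuffers/flatbuffers.h"

int main() {
  flatbuffers::FlatBufferBuilder fbb;
  // Hand-rolled equivalent of a generated CreateXxx() for: table X { v:int; }
  auto start = fbb.StartTable();
  fbb.AddElement<int32_t>(flatbuffers::FieldIndexToOffset(0), 42, /*def=*/0);
  auto table_end = fbb.EndTable(start);
  fbb.Finish(flatbuffers::Offset<flatbuffers::Table>(table_end));

  // Read it back the way generated accessors do.
  auto root = flatbuffers::GetRoot<flatbuffers::Table>(fbb.GetBufferPointer());
  assert(root->GetField<int32_t>(flatbuffers::FieldIndexToOffset(0), 0) == 42);
  return 0;
}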
  // Checks that a required field has been set in the table just constructed.
  template<typename T> void Required(Offset<T> table, voffset_t field) {
    auto table_ptr = buf_.data_at(table.o);
    auto vtable_ptr = table_ptr - ReadScalar<soffset_t>(table_ptr);
    bool ok = ReadScalar<voffset_t>(vtable_ptr + field) != 0;
    // If this fails, the caller will show which field needs to be set.
    FLATBUFFERS_ASSERT(ok);
    (void)ok;
  }

  uoffset_t StartStruct(size_t alignment) {
    // ...
  }

  uoffset_t EndStruct() { return GetSize(); }
  void ClearOffsets() {
    buf_.scratch_pop(num_field_loc * sizeof(FieldLoc));
    // ...
  }

  // Aligns such that when "len" bytes are written, an object can be written
  // after it with "alignment" without padding.
  void PreAlign(size_t len, size_t alignment) {
    TrackMinAlign(alignment);
    buf_.fill(PaddingBytes(GetSize() + len, alignment));
  }
  template<typename T> void PreAlign(size_t len) {
    PreAlign(len, sizeof(T));
  }
  /// @brief Store a string in the buffer, which can contain any binary data.
  Offset<String> CreateString(const char *str, size_t len) {
    PreAlign<uoffset_t>(len + 1);  // Always 0-terminated.
    // ...
    PushBytes(reinterpret_cast<const uint8_t *>(str), len);
    PushElement(static_cast<uoffset_t>(len));
    return Offset<String>(GetSize());
  }

  /// @brief Store a string in the buffer, which is null-terminated.
  Offset<String> CreateString(const char *str) {
    return CreateString(str, strlen(str));
  }

  /// @brief Same, for a non-const char pointer.
  Offset<String> CreateString(char *str) {
    return CreateString(str, strlen(str));
  }

  /// @brief Store a std::string in the buffer.
  Offset<String> CreateString(const std::string &str) {
    return CreateString(str.c_str(), str.length());
  }

  #ifdef FLATBUFFERS_HAS_STRING_VIEW
  /// @brief Store a string_view in the buffer.
  Offset<String> CreateString(flatbuffers::string_view str) {
    return CreateString(str.data(), str.size());
  }
  #endif  // FLATBUFFERS_HAS_STRING_VIEW

  /// @brief Store a String (already in some buffer) in this buffer.
  Offset<String> CreateString(const String *str) {
    return str ? CreateString(str->c_str(), str->Length()) : 0;
  }

  /// @brief Store any string-like object with c_str() and length().
  template<typename T> Offset<String> CreateString(const T &str) {
    return CreateString(str.c_str(), str.length());
  }
  /// @brief Store a string in the buffer; if a string with this exact
  /// contents has already been serialized, the offset of the existing copy is
  /// returned instead.
  Offset<String> CreateSharedString(const char *str, size_t len) {
    auto size_before_string = buf_.size();
    // Must serialize the string first, since the pool stores offsets into the
    // buffer.
    auto off = CreateString(str, len);
    auto it = string_pool->find(off);
    // If it exists, reuse the existing serialized data and drop our copy.
    if (it != string_pool->end()) {
      buf_.pop(buf_.size() - size_before_string);
      return *it;
    }
    // Record this string for future use.
    string_pool->insert(off);
    return off;
  }

  /// @brief Same, for a null-terminated string.
  Offset<String> CreateSharedString(const char *str) {
    return CreateSharedString(str, strlen(str));
  }

  /// @brief Same, for a std::string.
  Offset<String> CreateSharedString(const std::string &str) {
    return CreateSharedString(str.c_str(), str.length());
  }

  /// @brief Same, for a String already in a buffer.
  Offset<String> CreateSharedString(const String *str) {
    return CreateSharedString(str->c_str(), str->Length());
  }
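A quick sketch of the deduplication behaviour described above: building the same contents twice with CreateSharedString should yield the same offset.

#include "flatbuffers/flatbuffers.h"

int main() {
  flatbuffers::FlatBufferBuilder fbb;
  auto a = fbb.CreateSharedString("cache me");
  // Identical contents: the pool lookup finds the first copy and no new
  // bytes are written.
  auto b = fbb.CreateSharedString("cache me");
  return (a.o == b.o) ? 0 : 1;
}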
  uoffset_t EndVector(size_t len) {
    // ...
    return PushElement(static_cast<uoffset_t>(len));
  }

  void StartVector(size_t len, size_t elemsize) {
    // ...
    PreAlign<uoffset_t>(len * elemsize);
    PreAlign(len * elemsize, elemsize);  // Just in case elemsize > uoffset_t.
  }

  // Call this right before StartVector/CreateVector if you want to force the
  // alignment to be something different than what the element size would
  // normally dictate (e.g. SIMD floats, or a nested flatbuffer in a byte
  // vector).
  void ForceVectorAlignment(size_t len, size_t elemsize, size_t alignment) {
    PreAlign(len * elemsize, alignment);
  }

  /// @brief Serialize an array into a FlatBuffer `vector`.
  template<typename T> Offset<Vector<T>> CreateVector(const T *v, size_t len) {
    StartVector(len, sizeof(T));
    #if FLATBUFFERS_LITTLEENDIAN
      PushBytes(reinterpret_cast<const uint8_t *>(v), len * sizeof(T));
    #else
      if (sizeof(T) == 1) {
        PushBytes(reinterpret_cast<const uint8_t *>(v), len);
      } else {
        for (auto i = len; i > 0; ) {
          PushElement(v[--i]);
        }
      }
    #endif
    return Offset<Vector<T>>(EndVector(len));
  }

  template<typename T>
  Offset<Vector<Offset<T>>> CreateVector(const Offset<T> *v, size_t len) {
    StartVector(len, sizeof(uoffset_t));
    for (auto i = len; i > 0;) { PushElement(v[--i]); }
    return Offset<Vector<Offset<T>>>(EndVector(len));
  }

  /// @brief Serialize a `std::vector` into a FlatBuffer `vector`.
  template<typename T> Offset<Vector<T>> CreateVector(const std::vector<T> &v) {
    return CreateVector(data(v), v.size());
  }

  // vector<bool> may be implemented using a bit-set, so we can't access it as
  // an array; read the elements manually instead.
  Offset<Vector<uint8_t>> CreateVector(const std::vector<bool> &v) {
    StartVector(v.size(), sizeof(uint8_t));
    for (auto i = v.size(); i > 0;) {
      PushElement(static_cast<uint8_t>(v[--i]));
    }
    return Offset<Vector<uint8_t>>(EndVector(v.size()));
  }
  #ifndef FLATBUFFERS_CPP98_STL
  /// @brief Serialize values returned by a function into a FlatBuffer
  /// `vector`.
  template<typename T>
  Offset<Vector<T>> CreateVector(size_t vector_size,
                                 const std::function<T(size_t i)> &f) {
    std::vector<T> elems(vector_size);
    for (size_t i = 0; i < vector_size; i++) elems[i] = f(i);
    return CreateVector(elems);
  }
  #endif

  /// @brief Serialize values returned by a function (plus caller state) into
  /// a FlatBuffer `vector`.
  template<typename T, typename F, typename S>
  Offset<Vector<T>> CreateVector(size_t vector_size, F f, S *state) {
    std::vector<T> elems(vector_size);
    for (size_t i = 0; i < vector_size; i++) elems[i] = f(i, state);
    return CreateVector(elems);
  }

  /// @brief Serialize a `std::vector<std::string>` into a FlatBuffer
  /// `vector`. This is a convenience function for a common case.
  Offset<Vector<Offset<String>>> CreateVectorOfStrings(
      const std::vector<std::string> &v) {
    std::vector<Offset<String>> offsets(v.size());
    for (size_t i = 0; i < v.size(); i++) offsets[i] = CreateString(v[i]);
    return CreateVector(offsets);
  }
  /// @brief Serialize an array of structs into a FlatBuffer `vector`.
  template<typename T>
  Offset<Vector<const T *>> CreateVectorOfStructs(const T *v, size_t len) {
    StartVector(len * sizeof(T) / AlignOf<T>(), AlignOf<T>());
    PushBytes(reinterpret_cast<const uint8_t *>(v), sizeof(T) * len);
    return Offset<Vector<const T *>>(EndVector(len));
  }

  /// @brief Serialize an array of native structs into a FlatBuffer `vector`,
  /// using the Pack() function generated for T.
  template<typename T, typename S>
  Offset<Vector<const T *>> CreateVectorOfNativeStructs(const S *v,
                                                        size_t len) {
    extern T Pack(const S &);
    typedef T (*Pack_t)(const S &);
    std::vector<T> vv(len);
    std::transform(v, v + len, vv.begin(), *(Pack_t)&Pack);
    return CreateVectorOfStructs<T>(vv.data(), vv.size());
  }
  #ifndef FLATBUFFERS_CPP98_STL
  /// @brief Serialize structs produced by a filler callback into a FlatBuffer
  /// `vector`.
  template<typename T>
  Offset<Vector<const T *>> CreateVectorOfStructs(
      size_t vector_size, const std::function<void(size_t i, T *)> &filler) {
    T *structs = StartVectorOfStructs<T>(vector_size);
    for (size_t i = 0; i < vector_size; i++) {
      filler(i, structs);
      structs++;
    }
    return EndVectorOfStructs<T>(vector_size);
  }
  #endif
  /// @brief Serialize structs produced by a callback (plus caller state) into
  /// a FlatBuffer `vector`.
  template<typename T, typename F, typename S>
  Offset<Vector<const T *>> CreateVectorOfStructs(size_t vector_size, F f,
                                                  S *state) {
    T *structs = StartVectorOfStructs<T>(vector_size);
    for (size_t i = 0; i < vector_size; i++) {
      f(i, structs, state);
      structs++;
    }
    return EndVectorOfStructs<T>(vector_size);
  }
  /// @brief Serialize a `std::vector` of structs into a FlatBuffer `vector`.
  template<typename T, typename Alloc>
  Offset<Vector<const T *>> CreateVectorOfStructs(
      const std::vector<T, Alloc> &v) {
    return CreateVectorOfStructs(data(v), v.size());
  }

  /// @brief Serialize a `std::vector` of native structs into a FlatBuffer
  /// `vector`.
  template<typename T, typename S>
  Offset<Vector<const T *>> CreateVectorOfNativeStructs(
      const std::vector<S> &v) {
    return CreateVectorOfNativeStructs<T, S>(data(v), v.size());
  }
  template<typename T> struct StructKeyComparator {
    bool operator()(const T &a, const T &b) const {
      return a.KeyCompareLessThan(&b);
    }

   private:
    StructKeyComparator &operator=(const StructKeyComparator &);
  };
  /// @brief Serialize a `std::vector` of structs into a FlatBuffer `vector`
  /// in sorted order.
  template<typename T>
  Offset<Vector<const T *>> CreateVectorOfSortedStructs(std::vector<T> *v) {
    return CreateVectorOfSortedStructs(data(*v), v->size());
  }

  /// @brief Serialize a `std::vector` of native structs into a FlatBuffer
  /// `vector` in sorted order.
  template<typename T, typename S>
  Offset<Vector<const T *>> CreateVectorOfSortedNativeStructs(
      std::vector<S> *v) {
    return CreateVectorOfSortedNativeStructs<T, S>(data(*v), v->size());
  }

  /// @brief Serialize an array of structs into a FlatBuffer `vector` in
  /// sorted order.
  template<typename T>
  Offset<Vector<const T *>> CreateVectorOfSortedStructs(T *v, size_t len) {
    std::sort(v, v + len, StructKeyComparator<T>());
    return CreateVectorOfStructs(v, len);
  }

  /// @brief Serialize an array of native structs into a FlatBuffer `vector`
  /// in sorted order.
  template<typename T, typename S>
  Offset<Vector<const T *>> CreateVectorOfSortedNativeStructs(S *v,
                                                              size_t len) {
    extern T Pack(const S &);
    typedef T (*Pack_t)(const S &);
    std::vector<T> vv(len);
    std::transform(v, v + len, vv.begin(), *(Pack_t)&Pack);
    return CreateVectorOfSortedStructs<T>(vv, len);
  }
  template<typename T> struct TableKeyComparator {
    TableKeyComparator(vector_downward &buf) : buf_(buf) {}
    bool operator()(const Offset<T> &a, const Offset<T> &b) const {
      auto table_a = reinterpret_cast<T *>(buf_.data_at(a.o));
      auto table_b = reinterpret_cast<T *>(buf_.data_at(b.o));
      return table_a->KeyCompareLessThan(table_b);
    }
    vector_downward &buf_;

   private:
    TableKeyComparator &operator=(const TableKeyComparator &);
  };

  /// @brief Serialize an array of `table` offsets as a `vector` in the buffer
  /// in sorted order.
  template<typename T>
  Offset<Vector<Offset<T>>> CreateVectorOfSortedTables(Offset<T> *v,
                                                       size_t len) {
    std::sort(v, v + len, TableKeyComparator<T>(buf_));
    return CreateVector(v, len);
  }

  /// @brief Serialize a `std::vector` of `table` offsets as a `vector` in the
  /// buffer in sorted order.
  template<typename T>
  Offset<Vector<Offset<T>>> CreateVectorOfSortedTables(
      std::vector<Offset<T>> *v) {
    return CreateVectorOfSortedTables(data(*v), v->size());
  }
  /// @brief Specialized version of `CreateVector` for non-copying use cases.
  /// Write the data any time later to the returned buffer pointer `buf`.
  uoffset_t CreateUninitializedVector(size_t len, size_t elemsize,
                                      uint8_t **buf) {
    StartVector(len, elemsize);
    buf_.make_space(len * elemsize);
    auto vec_start = GetSize();
    auto vec_end = EndVector(len);
    *buf = buf_.data_at(vec_start);
    return vec_end;
  }

  /// @brief Typed version of the above.
  template<typename T>
  Offset<Vector<T>> CreateUninitializedVector(size_t len, T **buf) {
    return CreateUninitializedVector(len, sizeof(T),
                                     reinterpret_cast<uint8_t **>(buf));
  }

  /// @brief Write a struct by itself, typically to be part of a union.
  template<typename T> Offset<const T *> CreateStruct(const T &structobj) {
    Align(AlignOf<T>());
    buf_.push_small(structobj);
    return Offset<const T *>(GetSize());
  }
  /// @brief The length of a FlatBuffer file header.
  static const size_t kFileIdentifierLength = 4;

  /// @brief Finish serializing a buffer by writing the root offset.
  /// @param[in] file_identifier If given, the buffer will be prefixed with a
  /// standard FlatBuffers file header.
  template<typename T>
  void Finish(Offset<T> root, const char *file_identifier = nullptr) {
    Finish(root.o, file_identifier, false);
  }

  /// @brief Finish a buffer with a 32 bit size field pre-fixed (size of the
  /// buffer following the size field). These buffers are NOT compatible with
  /// standard buffers created by `Finish`; read them with
  /// `GetSizePrefixedRoot` instead of `GetRoot`.
  template<typename T>
  void FinishSizePrefixed(Offset<T> root,
                          const char *file_identifier = nullptr) {
    Finish(root.o, file_identifier, true);
  }

  void Finish(uoffset_t root, const char *file_identifier, bool size_prefix) {
    buf_.clear_scratch();
    // This will cause the whole buffer to be aligned.
    PreAlign((size_prefix ? sizeof(uoffset_t) : 0) + sizeof(uoffset_t) +
                 (file_identifier ? kFileIdentifierLength : 0),
             minalign_);
    if (file_identifier) {
      PushBytes(reinterpret_cast<const uint8_t *>(file_identifier),
                kFileIdentifierLength);
    }
    PushElement(ReferTo(root));  // Location of root.
    if (size_prefix) { PushElement(GetSize()); }
    // ...
  }
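A sketch of Finish() with a file identifier; "EXMP" is a made-up 4-character identifier (the required kFileIdentifierLength), checked afterwards with BufferHasIdentifier().

#include <cassert>
#include "flatbuffers/flatbuffers.h"

int main() {
  flatbuffers::FlatBufferBuilder fbb;
  auto root = fbb.CreateString("payload");
  // File identifiers are exactly 4 characters long.
  fbb.Finish(root, "EXMP");

  const uint8_t *buf = fbb.GetBufferPointer();
  assert(flatbuffers::BufferHasIdentifier(buf, "EXMP"));
  assert(fbb.GetSize() > 0);
  return 0;
}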
  // Comparator for the string pool: compares the actual string bytes stored
  // in the buffer at the two offsets.
  bool operator()(const Offset<String> &a, const Offset<String> &b) const {
    auto stra = reinterpret_cast<const String *>(buf_->data_at(a.o));
    auto strb = reinterpret_cast<const String *>(buf_->data_at(b.o));
    return strncmp(stra->c_str(), strb->c_str(),
                   (std::min)(stra->size(), strb->size()) + 1) < 0;
  }

  // Allocates space for a vector of structs; must be completed with
  // EndVectorOfStructs().
  template<typename T> T *StartVectorOfStructs(size_t vector_size) {
    StartVector(vector_size * sizeof(T) / AlignOf<T>(), AlignOf<T>());
    return reinterpret_cast<T *>(buf_.make_space(vector_size * sizeof(T)));
  }

  template<typename T>
  Offset<Vector<const T *>> EndVectorOfStructs(size_t vector_size) {
    return Offset<Vector<const T *>>(EndVector(vector_size));
  }
// Helpers to get a typed pointer to the root object contained in the buffer.
template<typename T> T *GetMutableRoot(void *buf) {
  return reinterpret_cast<T *>(
      reinterpret_cast<uint8_t *>(buf) +
      EndianScalar(*reinterpret_cast<uoffset_t *>(buf)));
}

template<typename T> const T *GetRoot(const void *buf) {
  return GetMutableRoot<T>(const_cast<void *>(buf));
}

template<typename T> const T *GetSizePrefixedRoot(const void *buf) {
  return GetRoot<T>(reinterpret_cast<const uint8_t *>(buf) +
                    sizeof(uoffset_t));
}
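A sketch pairing GetMutableRoot() with Vector::Mutate() to patch a scalar in an already-finished buffer (again using a vector as the root purely for illustration).

#include <cassert>
#include <cstdint>
#include <vector>
#include "flatbuffers/flatbuffers.h"

int main() {
  flatbuffers::FlatBufferBuilder fbb;
  std::vector<int32_t> values = { 1, 2, 3 };
  fbb.Finish(fbb.CreateVector(values));

  // GetMutableRoot gives write access to scalars already in the buffer.
  auto vec = flatbuffers::GetMutableRoot<flatbuffers::Vector<int32_t>>(
      fbb.GetBufferPointer());
  vec->Mutate(0, 99);
  assert(vec->Get(0) == 99);
  return 0;
}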
// Helpers to get a typed pointer to an object that is currently being built;
// creating new objects will lead to reallocations and invalidate the pointer.
template<typename T>
T *GetMutableTemporaryPointer(FlatBufferBuilder &fbb, Offset<T> offset) {
  // ...
}

template<typename T>
const T *GetTemporaryPointer(FlatBufferBuilder &fbb, Offset<T> offset) {
  return GetMutableTemporaryPointer<T>(fbb, offset);
}
/// @brief Get a pointer to the file_identifier section of the buffer.
/// Undefined for buffers whose schema has no file_identifier.
inline const char *GetBufferIdentifier(const void *buf,
                                       bool size_prefixed = false) {
  return reinterpret_cast<const char *>(buf) +
         ((size_prefixed) ? 2 * sizeof(uoffset_t) : sizeof(uoffset_t));
}

// Helper to see if the identifier in a buffer has the expected value.
inline bool BufferHasIdentifier(const void *buf, const char *identifier,
                                bool size_prefixed = false) {
  return strncmp(GetBufferIdentifier(buf, size_prefixed), identifier,
                 FlatBufferBuilder::kFileIdentifierLength) == 0;
}
// Helper class to verify the integrity of a FlatBuffer.
class Verifier FLATBUFFERS_FINAL_CLASS {
 public:
  Verifier(const uint8_t *buf, size_t buf_len, uoffset_t _max_depth = 64,
           uoffset_t _max_tables = 1000000)
      : buf_(buf),
        end_(buf + buf_len),
        max_depth_(_max_depth),
        max_tables_(_max_tables)
  #ifdef FLATBUFFERS_TRACK_VERIFIER_BUFFER_SIZE
        , upper_bound_(buf)
  #endif
  {}

  // Central location where any verification failures register.
  bool Check(bool ok) const {
    #ifdef FLATBUFFERS_DEBUG_VERIFICATION_FAILURE
      FLATBUFFERS_ASSERT(ok);
    #endif
    #ifdef FLATBUFFERS_TRACK_VERIFIER_BUFFER_SIZE
      if (!ok) upper_bound_ = buf_;
    #endif
    return ok;
  }

  // Verify any range within the buffer.
  bool Verify(const void *elem, size_t elem_len) const {
    #ifdef FLATBUFFERS_TRACK_VERIFIER_BUFFER_SIZE
      auto upper_bound = reinterpret_cast<const uint8_t *>(elem) + elem_len;
      if (upper_bound_ < upper_bound) upper_bound_ = upper_bound;
    #endif
    return Check(elem_len <= (size_t)(end_ - buf_) && elem >= buf_ &&
                 elem <= end_ - elem_len);
  }

  // Verify a range indicated by sizeof(T).
  template<typename T> bool Verify(const void *elem) const {
    return Verify(elem, sizeof(T));
  }

  // Verify a pointer (may be NULL) of a table type.
  template<typename T> bool VerifyTable(const T *table) {
    return !table || table->Verify(*this);
  }

  // Verify a pointer (may be NULL) of any vector type.
  template<typename T> bool Verify(const Vector<T> *vec) const {
    const uint8_t *end;
    return !vec || VerifyVector(reinterpret_cast<const uint8_t *>(vec),
                                sizeof(T), &end);
  }

  // Verify a pointer (may be NULL) of a vector of structs.
  template<typename T> bool Verify(const Vector<const T *> *vec) const {
    return Verify(reinterpret_cast<const Vector<T> *>(vec));
  }

  // Verify a pointer (may be NULL) to a string.
  bool Verify(const String *str) const {
    const uint8_t *end;
    return !str ||
           (VerifyVector(reinterpret_cast<const uint8_t *>(str), 1, &end) &&
            Verify(end, 1) &&      // Must have terminator.
            Check(*end == '\0'));  // Terminating with 0.
  }

  // Common code between vectors and strings.
  bool VerifyVector(const uint8_t *vec, size_t elem_size,
                    const uint8_t **end) const {
    // Check we can read the size field.
    if (!Verify<uoffset_t>(vec)) return false;
    // Check the whole array; if this is a string, the byte past the array
    // must be 0.
    auto size = ReadScalar<uoffset_t>(vec);
    auto max_elems = FLATBUFFERS_MAX_BUFFER_SIZE / elem_size;
    if (!Check(size < max_elems))
      return false;  // Protect against byte_size overflowing.
    auto byte_size = sizeof(size) + elem_size * size;
    *end = vec + byte_size;
    return Verify(vec, byte_size);
  }

  // Special case for string contents, after the above has been called.
  bool VerifyVectorOfStrings(const Vector<Offset<String>> *vec) const {
    if (vec) {
      for (uoffset_t i = 0; i < vec->size(); i++) {
        if (!Verify(vec->Get(i))) return false;
      }
    }
    return true;
  }
  // Special case for table contents, after the above has been called.
  template<typename T> bool VerifyVectorOfTables(const Vector<Offset<T>> *vec) {
    if (vec) {
      for (uoffset_t i = 0; i < vec->size(); i++) {
        if (!vec->Get(i)->Verify(*this)) return false;
      }
    }
    return true;
  }

  template<typename T>
  bool VerifyBufferFromStart(const char *identifier, const uint8_t *start) {
    if (identifier &&
        (size_t(end_ - start) < 2 * sizeof(flatbuffers::uoffset_t) ||
         !BufferHasIdentifier(start, identifier))) {
      return false;
    }
    // Call T::Verify, which must be in the generated code for this type.
    auto o = VerifyOffset(start);
    return o && reinterpret_cast<const T *>(start + o)->Verify(*this)
    #ifdef FLATBUFFERS_TRACK_VERIFIER_BUFFER_SIZE
           && GetComputedSize()
    #endif
        ;
  }

  // Verify this whole buffer, starting with root type T.
  template<typename T> bool VerifyBuffer() { return VerifyBuffer<T>(nullptr); }

  template<typename T> bool VerifyBuffer(const char *identifier) {
    return VerifyBufferFromStart<T>(identifier, buf_);
  }

  template<typename T> bool VerifySizePrefixedBuffer(const char *identifier) {
    return Verify<uoffset_t>(buf_) &&
           ReadScalar<uoffset_t>(buf_) == end_ - buf_ - sizeof(uoffset_t) &&
           VerifyBufferFromStart<T>(identifier, buf_ + sizeof(uoffset_t));
  }

  uoffset_t VerifyOffset(const uint8_t *start) const {
    if (!Verify<uoffset_t>(start)) return false;
    auto o = ReadScalar<uoffset_t>(start);
    // ...
  }

  // Called at the start of a table; bails out with false if the depth or
  // table-count limits set by the constructor have been hit.
  bool VerifyComplexity() {
    // ...
    return Check(depth_ <= max_depth_ && num_tables_ <= max_tables_);
  }

  #ifdef FLATBUFFERS_TRACK_VERIFIER_BUFFER_SIZE
  // Returns the message size in bytes.
  size_t GetComputedSize() const {
    uintptr_t size = upper_bound_ - buf_;
    // Align the size to uoffset_t.
    size = (size - 1 + sizeof(uoffset_t)) & ~(sizeof(uoffset_t) - 1);
    return (buf_ + size > end_) ? 0 : size;
  }
  #endif

 private:
  const uint8_t *buf_;
  const uint8_t *end_;
  uoffset_t max_depth_;
  uoffset_t num_tables_;
  uoffset_t max_tables_;
  #ifdef FLATBUFFERS_TRACK_VERIFIER_BUFFER_SIZE
  mutable const uint8_t *upper_bound_;
  #endif
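In schema-driven code you would call VerifyBuffer<T>(), which invokes the generated T::Verify(). With no generated types at hand, this sketch exercises the lower-level checks on a buffer whose root is a String.

#include <cassert>
#include "flatbuffers/flatbuffers.h"

int main() {
  flatbuffers::FlatBufferBuilder fbb;
  fbb.Finish(fbb.CreateString("check me"));

  flatbuffers::Verifier verifier(fbb.GetBufferPointer(), fbb.GetSize());
  // The root offset itself must land inside the buffer...
  assert(verifier.VerifyOffset(fbb.GetBufferPointer()) != 0);
  // ...and the String it points at must be in bounds and 0-terminated.
  auto str = flatbuffers::GetRoot<flatbuffers::String>(fbb.GetBufferPointer());
  assert(verifier.Verify(str));
  return 0;
}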
// Convenient way to bundle a buffer and its length, typed by its root.
// A BufferRef does not own its buffer.
struct BufferRefBase {};  // for std::is_base_of
template<typename T> struct BufferRef : BufferRefBase {
  BufferRef() : buf(nullptr), len(0), must_free(false) {}
  BufferRef(uint8_t *_buf, uoffset_t _len)
      : buf(_buf), len(_len), must_free(false) {}

  ~BufferRef() {
    if (must_free) free(buf);
  }

  const T *GetRoot() const { return flatbuffers::GetRoot<T>(buf); }

  bool Verify() {
    Verifier verifier(buf, len);
    return verifier.VerifyBuffer<T>(nullptr);
  }

  uint8_t *buf;
  uoffset_t len;
  bool must_free;
};
// "Structs" are flat structures that do not have an offset table; they always
// have all members present and do not support forward/backward compatible
// extensions.
class Struct FLATBUFFERS_FINAL_CLASS {
 public:
  template<typename T> T GetField(uoffset_t o) const {
    return ReadScalar<T>(&data_[o]);
  }

  template<typename T> T GetStruct(uoffset_t o) const {
    return reinterpret_cast<T>(&data_[o]);
  }

  const uint8_t *GetAddressOf(uoffset_t o) const { return &data_[o]; }
  uint8_t *GetAddressOf(uoffset_t o) { return &data_[o]; }
  // "Tables" use an offset table (the vtable) that allows fields to be
  // omitted and added at will, at the cost of an extra indirection per read.
  const uint8_t *GetVTable() const {
    return data_ - ReadScalar<soffset_t>(data_);
  }

  // This gets the field offset for any of the functions below it, or 0 if the
  // field was not present.
  voffset_t GetOptionalFieldOffset(voffset_t field) const {
    // The vtable offset is always at the start.
    auto vtable = GetVTable();
    // The first element is the size of the vtable (fields + type id + itself).
    auto vtsize = ReadScalar<voffset_t>(vtable);
    // If the field we're accessing is outside the vtable, we're reading older
    // data, so it's the same as if the offset was 0 (not present).
    return field < vtsize ? ReadScalar<voffset_t>(vtable + field) : 0;
  }

  template<typename T> T GetField(voffset_t field, T defaultval) const {
    auto field_offset = GetOptionalFieldOffset(field);
    return field_offset ? ReadScalar<T>(data_ + field_offset) : defaultval;
  }

  template<typename P> P GetPointer(voffset_t field) {
    auto field_offset = GetOptionalFieldOffset(field);
    auto p = data_ + field_offset;
    return field_offset ? reinterpret_cast<P>(p + ReadScalar<uoffset_t>(p))
                        : nullptr;
  }
  template<typename P> P GetPointer(voffset_t field) const {
    return const_cast<Table *>(this)->GetPointer<P>(field);
  }

  template<typename P> P GetStruct(voffset_t field) const {
    auto field_offset = GetOptionalFieldOffset(field);
    auto p = const_cast<uint8_t *>(data_ + field_offset);
    return field_offset ? reinterpret_cast<P>(p) : nullptr;
  }

  template<typename T> bool SetField(voffset_t field, T val, T def) {
    auto field_offset = GetOptionalFieldOffset(field);
    if (!field_offset) return val == def;
    WriteScalar(data_ + field_offset, val);
    return true;
  }

  bool SetPointer(voffset_t field, const uint8_t *val) {
    auto field_offset = GetOptionalFieldOffset(field);
    if (!field_offset) return false;
    WriteScalar(data_ + field_offset,
                static_cast<uoffset_t>(val - (data_ + field_offset)));
    return true;
  }
  uint8_t *GetAddressOf(voffset_t field) {
    auto field_offset = GetOptionalFieldOffset(field);
    return field_offset ? data_ + field_offset : nullptr;
  }
  const uint8_t *GetAddressOf(voffset_t field) const {
    return const_cast<Table *>(this)->GetAddressOf(field);
  }

  bool CheckField(voffset_t field) const {
    return GetOptionalFieldOffset(field) != 0;
  }

  // Verify the vtable of this table.
  // Call this once per table, followed by VerifyField once per field.
  bool VerifyTableStart(Verifier &verifier) const {
    // Check the vtable offset.
    if (!verifier.Verify<soffset_t>(data_)) return false;
    auto vtable = GetVTable();
    // Check the vtable size field, then check the vtable fits in its entirety.
    return verifier.VerifyComplexity() && verifier.Verify<voffset_t>(vtable) &&
           (ReadScalar<voffset_t>(vtable) & (sizeof(voffset_t) - 1)) == 0 &&
           verifier.Verify(vtable, ReadScalar<voffset_t>(vtable));
  }

  // Verify a particular field.
  template<typename T>
  bool VerifyField(const Verifier &verifier, voffset_t field) const {
    // Calling GetOptionalFieldOffset is safe now thanks to VerifyTableStart.
    auto field_offset = GetOptionalFieldOffset(field);
    // Check the actual field.
    return !field_offset || verifier.Verify<T>(data_ + field_offset);
  }

  // VerifyField for required fields.
  template<typename T>
  bool VerifyFieldRequired(const Verifier &verifier, voffset_t field) const {
    auto field_offset = GetOptionalFieldOffset(field);
    return verifier.Check(field_offset != 0) &&
           verifier.Verify<T>(data_ + field_offset);
  }

  // Versions for offsets.
  bool VerifyOffset(const Verifier &verifier, voffset_t field) const {
    auto field_offset = GetOptionalFieldOffset(field);
    return !field_offset || verifier.VerifyOffset(data_ + field_offset);
  }

  bool VerifyOffsetRequired(const Verifier &verifier, voffset_t field) const {
    auto field_offset = GetOptionalFieldOffset(field);
    return verifier.Check(field_offset != 0) &&
           verifier.VerifyOffset(data_ + field_offset);
  }

 private:
  // You obtain instances of this class by pointing at existing data only.
  Table();
  Table(const Table &other);
// This computes the start of a FlatBuffer from a root pointer, i.e. it is the
// opposite transformation of GetRoot().
inline const uint8_t *GetBufferStartFromRootPointer(const void *root) {
  auto table = reinterpret_cast<const Table *>(root);
  auto vtable = table->GetVTable();
  // Either the vtable is before the root or after the root.
  auto start = (std::min)(vtable, reinterpret_cast<const uint8_t *>(root));
  // Align to at least sizeof(uoffset_t).
  start = reinterpret_cast<const uint8_t *>(
      reinterpret_cast<uintptr_t>(start) & ~(sizeof(uoffset_t) - 1));
  // There may also be a file_identifier and the root offset in front, and the
  // buffer may have been aligned to anything up to FLATBUFFERS_MAX_ALIGNMENT,
  // so probe all possible start locations.
  static_assert(FlatBufferBuilder::kFileIdentifierLength == sizeof(uoffset_t),
                "file_identifier is assumed to be the same size as uoffset_t");
  for (auto possible_roots = FLATBUFFERS_MAX_ALIGNMENT / sizeof(uoffset_t) + 1;
       possible_roots; possible_roots--) {
    start -= sizeof(uoffset_t);
    if (ReadScalar<uoffset_t>(start) + start ==
        reinterpret_cast<const uint8_t *>(root))
      return start;
  }
  // ...
}

/// @brief Return the size prefix of a size-prefixed FlatBuffer.
inline uoffset_t GetPrefixedSize(const uint8_t *buf) {
  return ReadScalar<uoffset_t>(buf);
}
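A sketch of the size-prefixed variants working together: FinishSizePrefixed(), GetPrefixedSize() and GetSizePrefixedRoot() (the String root is purely for illustration).

#include <cassert>
#include "flatbuffers/flatbuffers.h"

int main() {
  flatbuffers::FlatBufferBuilder fbb;
  fbb.FinishSizePrefixed(fbb.CreateString("framed"));

  const uint8_t *buf = fbb.GetBufferPointer();
  // The 32-bit prefix holds the size of everything that follows it.
  assert(flatbuffers::GetPrefixedSize(buf) ==
         fbb.GetSize() - sizeof(flatbuffers::uoffset_t));
  auto str = flatbuffers::GetSizePrefixedRoot<flatbuffers::String>(buf);
  assert(str->str() == "framed");
  return 0;
}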
// Base class for native objects (FlatBuffer data de-serialized into native
// C++ data structures). Contains no functionality; purely documentative.
struct NativeTable {};

/// @brief Function types used when resolving hashes into object pointers and
/// vice versa (used by the Parser, but available to any client code).
typedef uint64_t hash_value_t;
#ifdef FLATBUFFERS_CPP98_STL
  typedef void (*resolver_function_t)(void **pointer_adr, hash_value_t hash);
  typedef hash_value_t (*rehasher_function_t)(void *pointer);
#else
  typedef std::function<void(void **pointer_adr, hash_value_t hash)>
      resolver_function_t;
  typedef std::function<hash_value_t(void *pointer)> rehasher_function_t;
#endif

// Helper function to test if a field is present, using any of the field enums
// in the generated code. Note: returns false for fields equal to their default
// value, since those are not stored in the buffer (unless force_defaults was
// used).
template<typename T> bool IsFieldPresent(const T *table, voffset_t field) {
  // Cast, since Table is a private base class of any table types.
  return reinterpret_cast<const Table *>(table)->CheckField(field);
}

// Utility for reverse lookups on the EnumNames*() functions in the generated
// code. `names` must be NULL terminated.
inline int LookupEnum(const char **names, const char *name) {
  for (const char **p = names; *p; p++)
    if (!strcmp(*p, name)) return static_cast<int>(p - names);
  return -1;
}
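A sketch of LookupEnum() against a hand-written, NULL-terminated name array of the shape the generated EnumNames*() functions return (the names here are made up).

#include <cassert>
#include "flatbuffers/flatbuffers.h"

int main() {
  // One name per enum value, terminated by a null pointer.
  const char *names[] = { "NONE", "Red", "Green", nullptr };
  assert(flatbuffers::LookupEnum(names, "Green") == 2);
  return 0;
}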
#if defined(_MSC_VER)
  #define FLATBUFFERS_MANUALLY_ALIGNED_STRUCT(alignment) \
    __pragma(pack(1)); \
    struct __declspec(align(alignment))
  #define FLATBUFFERS_STRUCT_END(name, size) \
    __pragma(pack()); \
    static_assert(sizeof(name) == size, "compiler breaks packing rules")
#elif defined(__GNUC__) || defined(__clang__)
  #define FLATBUFFERS_MANUALLY_ALIGNED_STRUCT(alignment) \
    _Pragma("pack(1)") \
    struct __attribute__((aligned(alignment)))
  #define FLATBUFFERS_STRUCT_END(name, size) \
    _Pragma("pack()") \
    static_assert(sizeof(name) == size, "compiler breaks packing rules")
#else
  #error Unknown compiler, please define structure alignment macros
#endif

// Minimal reflection used by the generated type tables.
enum SequenceType { ST_TABLE, ST_STRUCT, ST_UNION, ST_ENUM };

// One ET(...) entry per elementary type; scalars have the same order as in
// idl.h.
#define FLATBUFFERS_GEN_ELEMENTARY_TYPES(ET) \
  /* ... */ \
  ET(ET_SEQUENCE)  // See SequenceType.

enum ElementaryType {
  #define FLATBUFFERS_ET(E) E,
  FLATBUFFERS_GEN_ELEMENTARY_TYPES(FLATBUFFERS_ET)
  #undef FLATBUFFERS_ET
};

inline const char *const *ElementaryTypeNames() {
  static const char *const names[] = {
    #define FLATBUFFERS_ET(E) #E,
    FLATBUFFERS_GEN_ELEMENTARY_TYPES(FLATBUFFERS_ET)
    #undef FLATBUFFERS_ET
  };
  return names;
}

// A packed description of one field's type.
struct TypeCode {
  uint16_t base_type : 4;  // ElementaryType
  uint16_t is_vector : 1;
  int16_t sequence_ref : 11;  // Index into type_refs below, or -1 for none.
};

static_assert(sizeof(TypeCode) == 2, "TypeCode");

typedef const TypeTable *(*TypeFunction)();

struct TypeTable {
  // ...
  const TypeCode *type_codes;
  const TypeFunction *type_refs;
  const int32_t *values;      // Only set for non-consecutive enums/unions.
  const char *const *names;   // Only set if compiled with --reflect-names.
};

// String which identifies the current version of FlatBuffers.
#if !defined(_WIN32) && !defined(__CYGWIN__)
extern volatile __attribute__((weak)) const char *flatbuffer_version_string;
volatile __attribute__((weak)) const char *flatbuffer_version_string =
    "FlatBuffers " FLATBUFFERS_STRING(FLATBUFFERS_VERSION_MAJOR) "."
    FLATBUFFERS_STRING(FLATBUFFERS_VERSION_MINOR) "."
    FLATBUFFERS_STRING(FLATBUFFERS_VERSION_REVISION);
#endif  // !defined(_WIN32) && !defined(__CYGWIN__)

#define FLATBUFFERS_DEFINE_BITMASK_OPERATORS(E, T)\
    inline E operator | (E lhs, E rhs){\
        return E(T(lhs) | T(rhs));\
    }\
    inline E operator & (E lhs, E rhs){\
        return E(T(lhs) & T(rhs));\
    }\
    inline E operator ^ (E lhs, E rhs){\
        return E(T(lhs) ^ T(rhs));\
    }\
    inline E operator ~ (E lhs){\
        return E(~T(lhs));\
    }\
    inline E operator |= (E &lhs, E rhs){\
        lhs = lhs | rhs;\
        return lhs;\
    }\
    inline E operator &= (E &lhs, E rhs){\
        lhs = lhs & rhs;\
        return lhs;\
    }\
    inline E operator ^= (E &lhs, E rhs){\
        lhs = lhs ^ rhs;\
        return lhs;\
    }\
    inline bool operator !(E rhs) \
    {\
        return !bool(T(rhs)); \
    }

#if defined(_MSC_VER)
  #pragma warning(pop)
#endif

#endif  // FLATBUFFERS_H_

Symbol index (signatures and brief descriptions extracted from the page's
tooltips, in no particular order):

FLATBUFFERS_CONSTEXPR size_t AlignOf()
virtual uint8_t * reallocate_downward(uint8_t *old_p, size_t old_size, size_t new_size, size_t in_use_back, size_t in_use_front)
Offset< Vector< const T * > > CreateVectorOfSortedStructs(T *v, size_t len)
Serialize an array of structs into a FlatBuffer vector in sorted order.
const T * data(const std::vector< T, Alloc > &v)
DetachedBuffer ReleaseBufferPointer()
Get the released pointer to the serialized buffer.
VectorIterator & operator+=(const uoffset_t &offset)
void Mutate(uoffset_t i, const T &val)
const U * GetAs(uoffset_t i) const
void memcpy_downward(uint8_t *old_p, size_t old_size, uint8_t *new_p, size_t new_size, size_t in_use_back, size_t in_use_front)
IndirectHelper< T >::return_type return_type
VectorIterator operator++(int)
StringOffsetMap * string_pool
VectorIterator< T, typename IndirectHelper< T >::mutable_return_type > iterator
Offset< Vector< const T * > > CreateVectorOfSortedStructs(std::vector< T > *v)
Serialize a std::vector of structs into a FlatBuffer vector in sorted order.
void fill(size_t zero_pad_bytes)
void ForceDefaults(bool fd)
In order to save space, fields that are set to their default value don't get serialized into the buffer. Forcing defaults provides a way to manually disable this optimization.
Offset< const T * > CreateStruct(const T &structobj)
Write a struct by itself, typically to be part of a union.
static int KeyCompare(const void *ap, const void *bp)
Vector< Offset< T > > * VectorCast(Vector< Offset< U >> *ptr)
Offset< Vector< const T * > > CreateVectorOfStructs(size_t vector_size, F f, S *state)
Serialize an array of structs into a FlatBuffer vector.
Offset< Vector< T > > CreateUninitializedVector(size_t len, T **buf)
Specialized version of CreateVector for non-copying use cases. Write the data any time later to the returned buffer pointer `buf`.
voffset_t FieldIndexToOffset(voffset_t field_id)
VectorIterator & operator=(const VectorIterator &other)
void MutateOffset(uoffset_t i, const uint8_t *val)
void Finish(Offset< T > root, const char *file_identifier=nullptr)
Finish serializing a buffer by writing the root offset.
Offset< Vector< T > > CreateVector(const std::vector< T > &v)
Serialize a std::vector into a FlatBuffer vector.
const uint8_t * Data() const
VectorIterator operator+(const uoffset_t &offset) const
VectorIterator operator-(const uoffset_t &offset)
const_iterator begin() const
const char * c_str() const
const uint8_t * Data() const
ptrdiff_t operator-(const VectorIterator &other) const
VectorIterator(const uint8_t *data, uoffset_t i)
Offset< String > CreateString(const char *str, size_t len)
Store a string in the buffer, which can contain any binary data.
return_type LookupByKey(K key) const
vector_downward(size_t initial_size, Allocator *allocator, bool own_allocator, size_t buffer_minalign)
uint8_t * scratch_data() const
bool operator<(const VectorIterator &other) const
Offset< String > CreateString(const char *str)
Store a string in the buffer, which is null-terminated.
const uint8_t * data() const
IndirectHelper< T >::mutable_return_type mutable_return_type
return_type operator[](uoffset_t i) const
Offset< Vector< Offset< String > > > CreateVectorOfStrings(const std::vector< std::string > &v)
Serialize a std::vector<std::string> into a FlatBuffer vector. This is a convenience function for a common case.
Offset< Vector< const T * > > CreateVectorOfSortedNativeStructs(std::vector< S > *v)
Serialize a std::vector of native structs into a FlatBuffer vector in sorted order.
uoffset_t scratch_size() const
FlatBufferBuilder(size_t initial_size=1024, Allocator *allocator=nullptr, bool own_allocator=false, size_t buffer_minalign=AlignOf< largest_scalar_t >())
Default constructor for FlatBufferBuilder.
T * StartVectorOfStructs(size_t vector_size)
Offset< String > CreateString(const T &str)
Store a string in the buffer, which can contain any binary data.
Helper class to hold data needed in creation of a FlatBuffer. To serialize data, you typically call one of the Create*() functions in the generated code, which in turn call a sequence of StartTable/PushElement/AddElement/EndTable, or the builtin CreateString/CreateVector functions.
VectorIterator< T, typename IndirectHelper< T >::return_type > const_iterator
static return_type Read(const uint8_t *p, uoffset_t i)
Allocator & get_allocator()
uint8_t * GetCurrentBufferPointer() const
Get a pointer to an unfinished buffer.
const vector_downward * buf_
std::is_scalar< T > is_scalar
uint8_t * make_space(size_t len)
DetachedBuffer(DetachedBuffer &&other)
bool operator==(const VectorIterator &other) const
void push(const uint8_t *bytes, size_t num)
void pop(size_t bytes_to_remove)
VectorIterator & operator--()
void DedupVtables(bool dedup)
By default vtables are deduped in order to save space.
E GetEnum(uoffset_t i) const
void push_small(const T &little_endian_t)
virtual uint8_t * allocate(size_t size)=0
return_type Get(uoffset_t i) const
bool operator()(const Offset< String > &a, const Offset< String > &b) const
Offset< Vector< const T * > > CreateVectorOfNativeStructs(const S *v, size_t len)
Serialize an array of native structs into a FlatBuffer vector.
DetachedBuffer Release()
Get the released DetachedBuffer.
static DefaultAllocator & instance()
Offset< String > CreateString(char *str)
Store a string in the buffer, which is null-terminated.
VectorIterator operator--(int)
Offset< Vector< const T * > > CreateVectorOfNativeStructs(const std::vector< S > &v)
Serialize a std::vector of native structs into a FlatBuffer vector.
void Finish(uoffset_t root, const char *file_identifier, bool size_prefix)
Offset< Vector< uint8_t > > CreateVector(const std::vector< bool > &v)
bool operator<(const String &o) const
void scratch_pop(size_t bytes_to_remove)
void fill_big(size_t zero_pad_bytes)
void scratch_push_small(const T &t)
uint8_t * scratch_end() const
Offset< Vector< T > > CreateVector(size_t vector_size, F f, S *state)
Serialize values returned by a function into a FlatBuffer vector. This is a convenience function that takes care of the iteration for you.
Offset< Vector< const T * > > CreateVectorOfSortedNativeStructs(S *v, size_t len)
Serialize an array of native structs into a FlatBuffer vector in sorted order.
Offset< Vector< const T * > > CreateVectorOfStructs(const T *v, size_t len)
Serialize an array of structs into a FlatBuffer vector.
#define FLATBUFFERS_ASSERT
Offset< Vector< Offset< T > > > CreateVectorOfSortedTables(Offset< T > *v, size_t len)
Serialize an array of table offsets as a vector in the buffer in sorted order.
Offset< Vector< const T * > > CreateVectorOfStructs(const std::vector< T, Alloc > &v)
Serialize a std::vector of structs into a FlatBuffer vector.
uoffset_t GetSize() const
The current size of the serialized buffer, counting from the end.
uoffset_t CreateUninitializedVector(size_t len, size_t elemsize, uint8_t **buf)
Specialized version of CreateVector for non-copying use cases. Write the data any time later to the returned buffer pointer `buf`.
static const size_t kFileIdentifierLength
The length of a FlatBuffer file header.
Offset< Vector< T > > CreateVector(const T *v, size_t len)
Serialize an array into a FlatBuffer vector.
std::random_access_iterator_tag iterator_category
virtual uint8_t * allocate(size_t size) FLATBUFFERS_OVERRIDE
static size_t VectorLength(const Vector< T > *v)
std::set< Offset< String >, StringOffsetCompare > StringOffsetMap
uint8_t * data_at(size_t offset) const
StringOffsetCompare(const vector_downward &buf)
Offset< String > CreateSharedString(const char *str, size_t len)
Store a string in the buffer, which can contain any binary data. If a string with this exact contents has already been serialized before, instead simply returns the offset of the existing string.
mutable_return_type GetMutableObject(uoffset_t i) const
Offset< Vector< const T * > > EndVectorOfStructs(size_t vector_size)
size_t ensure_space(size_t len)
DetachedBuffer(Allocator *allocator, bool own_allocator, uint8_t *buf, size_t reserved, uint8_t *cur, size_t sz)
Offset< String > CreateSharedString(const char *str)
Store a string in the buffer, which is null-terminated. If a string with this exact contents has already been serialized before, instead simply returns the offset of the existing string.
DetachedBuffer & operator=(DetachedBuffer &&other)
const_iterator end() const
size_t GetBufferMinAlignment()
Get the minimum alignment this buffer needs to be accessed properly. This is only known once all elements have been written (after you call Finish()).
VectorIterator(const VectorIterator &other)
VectorIterator & operator=(VectorIterator &&other)
VectorIterator & operator++()
Offset< String > CreateSharedString(const std::string &str)
Store a string in the buffer, which can contain any binary data. If a string with this exact contents has already been serialized before, instead simply returns the offset of the existing string.
void FinishSizePrefixed(Offset< T > root, const char *file_identifier=nullptr)
Finish a buffer with a 32 bit size field pre-fixed (size of the buffer following the size field). These buffers are NOT compatible with standard buffers created by Finish; read them with GetSizePrefixedRoot instead of GetRoot.
Offset< String > CreateString(const String *str)
Store a string in the buffer, which can contain any binary data.
void Clear()
Reset all the state in this FlatBufferBuilder so it can be reused to construct another buffer.
static return_type Read(const uint8_t *p, uoffset_t i)
Offset< Vector< Offset< T > > > CreateVector(const Offset< T > *v, size_t len)
Offset< String > CreateString(const std::string &str)
Store a string in the buffer, which can contain any binary data.
static return_type Read(const uint8_t *p, uoffset_t i)
VectorIterator & operator-=(const uoffset_t &offset)
const void * GetStructFromOffset(size_t o) const
const String * GetAsString(uoffset_t i) const
Offset< Vector< Offset< T > > > CreateVectorOfSortedTables(std::vector< Offset< T >> *v)
Serialize an array of table offsets as a vector in the buffer in sorted order.
virtual void deallocate(uint8_t *p, size_t) FLATBUFFERS_OVERRIDE
bool operator!=(const VectorIterator &other) const
Offset< void > Union() const
Offset< String > CreateSharedString(const String *str)
Store a string in the buffer, which can contain any binary data. If a string with this exact contents has already been serialized before, instead simply returns the offset of the existing string.
uint8_t * GetBufferPointer() const
Get the serialized buffer (after you call Finish()).
uoffset_t difference_type