Replace asserts with FLATBUFFERS_ASSERT (#4701)
* Replace asserts with FLATBUFFERS_ASSERT; several asserts have been converted to static_asserts. * Regenerate the monster test headers with generate_code.sh
parent 86153fd740
commit a66f9e769b
@@ -9,6 +9,10 @@
 #include <assert.h>

+#if !defined(FLATBUFFERS_ASSERT)
+#define FLATBUFFERS_ASSERT assert
+#endif
+
 #ifndef ARDUINO
 #include <cstdint>
 #endif
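
The new macro is only defined when the embedding project has not already provided one, so an application can route every FlatBuffers runtime check through its own handler simply by defining FLATBUFFERS_ASSERT before including any FlatBuffers header. A minimal sketch of such an override (the handler name and message format below are illustrative, not part of the library):

#include <cstdio>
#include <cstdlib>

// Hypothetical project-level handler; the library only requires that
// FLATBUFFERS_ASSERT(expr) is a valid statement.
static void my_flatbuffers_assert(const char *expr, const char *file, int line) {
  std::fprintf(stderr, "FlatBuffers check failed: %s (%s:%d)\n", expr, file, line);
  std::abort();
}

#define FLATBUFFERS_ASSERT(expr) \
  ((expr) ? (void)0 : my_flatbuffers_assert(#expr, __FILE__, __LINE__))

// Must come after the definition so the #if !defined() guard above is skipped.
#include "flatbuffers/flatbuffers.h"
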
@@ -209,7 +213,7 @@ template<typename T> T EndianSwap(T t) {
     u.i = FLATBUFFERS_BYTESWAP64(u.i);
     return u.t;
   } else {
-    assert(0);
+    FLATBUFFERS_ASSERT(0);
   }
 }

@@ -33,7 +33,8 @@ template<typename T> struct Offset {
 inline void EndianCheck() {
   int endiantest = 1;
   // If this fails, see FLATBUFFERS_LITTLEENDIAN above.
-  assert(*reinterpret_cast<char *>(&endiantest) == FLATBUFFERS_LITTLEENDIAN);
+  FLATBUFFERS_ASSERT(*reinterpret_cast<char *>(&endiantest) ==
+                     FLATBUFFERS_LITTLEENDIAN);
   (void)endiantest;
 }

@@ -194,7 +195,7 @@ template<typename T> class Vector {
   typedef typename IndirectHelper<T>::mutable_return_type mutable_return_type;

   return_type Get(uoffset_t i) const {
-    assert(i < size());
+    FLATBUFFERS_ASSERT(i < size());
     return IndirectHelper<T>::Read(Data(), i);
   }

@@ -232,7 +233,7 @@ template<typename T> class Vector {
   // Change elements if you have a non-const pointer to this object.
   // Scalars only. See reflection.h, and the documentation.
   void Mutate(uoffset_t i, const T &val) {
-    assert(i < size());
+    FLATBUFFERS_ASSERT(i < size());
     WriteScalar(data() + i, val);
   }

@@ -240,15 +241,15 @@ template<typename T> class Vector {
   // "val" points to the new table/string, as you can obtain from
   // e.g. reflection::AddFlatBuffer().
   void MutateOffset(uoffset_t i, const uint8_t *val) {
-    assert(i < size());
-    assert(sizeof(T) == sizeof(uoffset_t));
+    FLATBUFFERS_ASSERT(i < size());
+    static_assert(sizeof(T) == sizeof(uoffset_t), "Unrelated types");
     WriteScalar(data() + i,
                 static_cast<uoffset_t>(val - (Data() + i * sizeof(uoffset_t))));
   }

   // Get a mutable pointer to tables/strings inside this vector.
   mutable_return_type GetMutableObject(uoffset_t i) const {
-    assert(i < size());
+    FLATBUFFERS_ASSERT(i < size());
     return const_cast<mutable_return_type>(IndirectHelper<T>::Read(Data(), i));
   }

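Two of the checks above are not just renamed: sizeof(T) == sizeof(uoffset_t) is known at compile time, so it becomes a static_assert and costs nothing at runtime, while the index bound stays a runtime FLATBUFFERS_ASSERT. A small self-contained sketch of the same split (the function and type names are illustrative only, with uint32_t standing in for uoffset_t):

#include <cassert>
#include <cstddef>
#include <cstdint>

template<typename T> void mutate_offset_like(std::size_t i, std::size_t size) {
  // Compile-time property: fails during compilation when violated.
  static_assert(sizeof(T) == sizeof(uint32_t), "Unrelated types");
  // Runtime property: stand-in for FLATBUFFERS_ASSERT(i < size()).
  assert(i < size);
  (void)i; (void)size;
}

int main() {
  mutate_offset_like<int32_t>(0, 1);    // compiles and passes: 4 == 4, 0 < 1
  // mutate_offset_like<int64_t>(0, 1); // would not compile: 8 != 4
}
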
@ -368,7 +369,7 @@ class Allocator {
|
|||
virtual uint8_t *reallocate_downward(uint8_t *old_p, size_t old_size,
|
||||
size_t new_size, size_t in_use_back,
|
||||
size_t in_use_front) {
|
||||
assert(new_size > old_size); // vector_downward only grows
|
||||
FLATBUFFERS_ASSERT(new_size > old_size); // vector_downward only grows
|
||||
uint8_t *new_p = allocate(new_size);
|
||||
memcpy_downward(old_p, old_size, new_p, new_size, in_use_back,
|
||||
in_use_front);
|
||||
|
@ -428,7 +429,7 @@ class DetachedBuffer {
|
|||
reserved_(reserved),
|
||||
cur_(cur),
|
||||
size_(sz) {
|
||||
assert(allocator_);
|
||||
FLATBUFFERS_ASSERT(allocator_);
|
||||
}
|
||||
|
||||
DetachedBuffer(DetachedBuffer &&other)
|
||||
|
@ -499,7 +500,7 @@ class DetachedBuffer {
|
|||
|
||||
inline void destroy() {
|
||||
if (buf_) {
|
||||
assert(allocator_);
|
||||
FLATBUFFERS_ASSERT(allocator_);
|
||||
allocator_->deallocate(buf_, reserved_);
|
||||
}
|
||||
if (own_allocator_ && allocator_) { delete allocator_; }
|
||||
|
@ -537,12 +538,12 @@ class vector_downward {
|
|||
buf_(nullptr),
|
||||
cur_(nullptr),
|
||||
scratch_(nullptr) {
|
||||
assert(allocator_);
|
||||
FLATBUFFERS_ASSERT(allocator_);
|
||||
}
|
||||
|
||||
~vector_downward() {
|
||||
if (buf_) {
|
||||
assert(allocator_);
|
||||
FLATBUFFERS_ASSERT(allocator_);
|
||||
allocator_->deallocate(buf_, reserved_);
|
||||
}
|
||||
if (own_allocator_ && allocator_) { delete allocator_; }
|
||||
|
@ -550,7 +551,7 @@ class vector_downward {
|
|||
|
||||
void reset() {
|
||||
if (buf_) {
|
||||
assert(allocator_);
|
||||
FLATBUFFERS_ASSERT(allocator_);
|
||||
allocator_->deallocate(buf_, reserved_);
|
||||
buf_ = nullptr;
|
||||
}
|
||||
|
@ -583,11 +584,11 @@ class vector_downward {
|
|||
}
|
||||
|
||||
size_t ensure_space(size_t len) {
|
||||
assert(cur_ >= scratch_ && scratch_ >= buf_);
|
||||
FLATBUFFERS_ASSERT(cur_ >= scratch_ && scratch_ >= buf_);
|
||||
if (len > static_cast<size_t>(cur_ - scratch_)) { reallocate(len); }
|
||||
// Beyond this, signed offsets may not have enough range:
|
||||
// (FlatBuffers > 2GB not supported).
|
||||
assert(size() < FLATBUFFERS_MAX_BUFFER_SIZE);
|
||||
FLATBUFFERS_ASSERT(size() < FLATBUFFERS_MAX_BUFFER_SIZE);
|
||||
return len;
|
||||
}
|
||||
|
||||
|
@ -609,17 +610,17 @@ class vector_downward {
|
|||
size_t capacity() const { return reserved_; }
|
||||
|
||||
uint8_t *data() const {
|
||||
assert(cur_);
|
||||
FLATBUFFERS_ASSERT(cur_);
|
||||
return cur_;
|
||||
}
|
||||
|
||||
uint8_t *scratch_data() const {
|
||||
assert(buf_);
|
||||
FLATBUFFERS_ASSERT(buf_);
|
||||
return buf_;
|
||||
}
|
||||
|
||||
uint8_t *scratch_end() const {
|
||||
assert(scratch_);
|
||||
FLATBUFFERS_ASSERT(scratch_);
|
||||
return scratch_;
|
||||
}
|
||||
|
||||
|
@ -671,7 +672,7 @@ class vector_downward {
|
|||
uint8_t *scratch_; // Points to the end of the scratchpad in use.
|
||||
|
||||
void reallocate(size_t len) {
|
||||
assert(allocator_);
|
||||
FLATBUFFERS_ASSERT(allocator_);
|
||||
auto old_reserved = reserved_;
|
||||
auto old_size = size();
|
||||
auto old_scratch_size = scratch_size();
|
||||
|
@ -815,7 +816,7 @@ class FlatBufferBuilder {
|
|||
// FlatBufferBuilder::Finish with your root table.
|
||||
// If you really need to access an unfinished buffer, call
|
||||
// GetCurrentBufferPointer instead.
|
||||
assert(finished);
|
||||
FLATBUFFERS_ASSERT(finished);
|
||||
}
|
||||
/// @endcond
|
||||
|
||||
|
@ -908,7 +909,7 @@ class FlatBufferBuilder {
|
|||
// Align to ensure GetSize() below is correct.
|
||||
Align(sizeof(uoffset_t));
|
||||
// Offset must refer to something already in buffer.
|
||||
assert(off && off <= GetSize());
|
||||
FLATBUFFERS_ASSERT(off && off <= GetSize());
|
||||
return GetSize() - off + static_cast<uoffset_t>(sizeof(uoffset_t));
|
||||
}
|
||||
|
||||
|
@ -921,9 +922,9 @@ class FlatBufferBuilder {
|
|||
// Ignoring this assert may appear to work in simple cases, but the reason
|
||||
// it is here is that storing objects in-line may cause vtable offsets
|
||||
// to not fit anymore. It also leads to vtable duplication.
|
||||
assert(!nested);
|
||||
FLATBUFFERS_ASSERT(!nested);
|
||||
// If you hit this, fields were added outside the scope of a table.
|
||||
assert(!num_field_loc);
|
||||
FLATBUFFERS_ASSERT(!num_field_loc);
|
||||
}
|
||||
|
||||
// From generated code (or from the parser), we call StartTable/EndTable
|
||||
|
@ -939,7 +940,7 @@ class FlatBufferBuilder {
|
|||
// resulting vtable offset.
|
||||
uoffset_t EndTable(uoffset_t start) {
|
||||
// If you get this assert, a corresponding StartTable wasn't called.
|
||||
assert(nested);
|
||||
FLATBUFFERS_ASSERT(nested);
|
||||
// Write the vtable offset, which is the start of any Table.
|
||||
// We fill it's value later.
|
||||
auto vtableoffsetloc = PushElement<soffset_t>(0);
|
||||
|
@ -953,7 +954,8 @@ class FlatBufferBuilder {
|
|||
FieldIndexToOffset(0));
|
||||
buf_.fill_big(max_voffset_);
|
||||
auto table_object_size = vtableoffsetloc - start;
|
||||
assert(table_object_size < 0x10000); // Vtable use 16bit offsets.
|
||||
// Vtable use 16bit offsets.
|
||||
FLATBUFFERS_ASSERT(table_object_size < 0x10000);
|
||||
WriteScalar<voffset_t>(buf_.data() + sizeof(voffset_t),
|
||||
static_cast<voffset_t>(table_object_size));
|
||||
WriteScalar<voffset_t>(buf_.data(), max_voffset_);
|
||||
|
@ -963,7 +965,8 @@ class FlatBufferBuilder {
|
|||
auto field_location = reinterpret_cast<FieldLoc *>(it);
|
||||
auto pos = static_cast<voffset_t>(vtableoffsetloc - field_location->off);
|
||||
// If this asserts, it means you've set a field twice.
|
||||
assert(!ReadScalar<voffset_t>(buf_.data() + field_location->id));
|
||||
FLATBUFFERS_ASSERT(
|
||||
!ReadScalar<voffset_t>(buf_.data() + field_location->id));
|
||||
WriteScalar<voffset_t>(buf_.data() + field_location->id, pos);
|
||||
}
|
||||
ClearOffsets();
|
||||
|
@ -1011,7 +1014,7 @@ class FlatBufferBuilder {
|
|||
auto vtable_ptr = table_ptr - ReadScalar<soffset_t>(table_ptr);
|
||||
bool ok = ReadScalar<voffset_t>(vtable_ptr + field) != 0;
|
||||
// If this fails, the caller will show what field needs to be set.
|
||||
assert(ok);
|
||||
FLATBUFFERS_ASSERT(ok);
|
||||
(void)ok;
|
||||
}
|
||||
|
||||
|
@ -1143,7 +1146,7 @@ class FlatBufferBuilder {
|
|||
|
||||
/// @cond FLATBUFFERS_INTERNAL
|
||||
uoffset_t EndVector(size_t len) {
|
||||
assert(nested); // Hit if no corresponding StartVector.
|
||||
FLATBUFFERS_ASSERT(nested); // Hit if no corresponding StartVector.
|
||||
nested = false;
|
||||
return PushElement(static_cast<uoffset_t>(len));
|
||||
}
|
||||
|
@ -1559,7 +1562,7 @@ class FlatBufferBuilder {
|
|||
(file_identifier ? kFileIdentifierLength : 0),
|
||||
minalign_);
|
||||
if (file_identifier) {
|
||||
assert(strlen(file_identifier) == kFileIdentifierLength);
|
||||
FLATBUFFERS_ASSERT(strlen(file_identifier) == kFileIdentifierLength);
|
||||
PushBytes(reinterpret_cast<const uint8_t *>(file_identifier),
|
||||
kFileIdentifierLength);
|
||||
}
|
||||
|
@ -1698,7 +1701,7 @@ class Verifier FLATBUFFERS_FINAL_CLASS {
|
|||
bool Check(bool ok) const {
|
||||
// clang-format off
|
||||
#ifdef FLATBUFFERS_DEBUG_VERIFICATION_FAILURE
|
||||
assert(ok);
|
||||
FLATBUFFERS_ASSERT(ok);
|
||||
#endif
|
||||
#ifdef FLATBUFFERS_TRACK_VERIFIER_BUFFER_SIZE
|
||||
if (!ok)
|
||||
|
@ -2066,7 +2069,7 @@ inline const uint8_t *GetBufferStartFromRootPointer(const void *root) {
|
|||
// or the buffer is corrupt.
|
||||
// Assert, because calling this function with bad data may cause reads
|
||||
// outside of buffer boundaries.
|
||||
assert(false);
|
||||
FLATBUFFERS_ASSERT(false);
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
|
|
|
@ -97,23 +97,23 @@ inline bool IsFixedTypedVector(Type t) {
|
|||
}
|
||||
|
||||
inline Type ToTypedVector(Type t, size_t fixed_len = 0) {
|
||||
assert(IsTypedVectorElementType(t));
|
||||
FLATBUFFERS_ASSERT(IsTypedVectorElementType(t));
|
||||
switch (fixed_len) {
|
||||
case 0: return static_cast<Type>(t - TYPE_INT + TYPE_VECTOR_INT);
|
||||
case 2: return static_cast<Type>(t - TYPE_INT + TYPE_VECTOR_INT2);
|
||||
case 3: return static_cast<Type>(t - TYPE_INT + TYPE_VECTOR_INT3);
|
||||
case 4: return static_cast<Type>(t - TYPE_INT + TYPE_VECTOR_INT4);
|
||||
default: assert(0); return TYPE_NULL;
|
||||
default: FLATBUFFERS_ASSERT(0); return TYPE_NULL;
|
||||
}
|
||||
}
|
||||
|
||||
inline Type ToTypedVectorElementType(Type t) {
|
||||
assert(IsTypedVector(t));
|
||||
FLATBUFFERS_ASSERT(IsTypedVector(t));
|
||||
return static_cast<Type>(t - TYPE_VECTOR_INT + TYPE_INT);
|
||||
}
|
||||
|
||||
inline Type ToFixedTypedVectorElementType(Type t, uint8_t *len) {
|
||||
assert(IsFixedTypedVector(t));
|
||||
FLATBUFFERS_ASSERT(IsFixedTypedVector(t));
|
||||
auto fixed_type = t - TYPE_VECTOR_INT2;
|
||||
*len = static_cast<uint8_t>(fixed_type / 3 +
|
||||
2); // 3 types each, starting from length 2.
|
||||
|
@ -690,7 +690,7 @@ class Reference {
|
|||
return Mutate(dest, static_cast<double>(t), byte_width, value_width);
|
||||
if (byte_width == sizeof(float))
|
||||
return Mutate(dest, static_cast<float>(t), byte_width, value_width);
|
||||
assert(false);
|
||||
FLATBUFFERS_ASSERT(false);
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -1026,11 +1026,11 @@ class Builder FLATBUFFERS_FINAL_CLASS {
|
|||
// We should have interleaved keys and values on the stack.
|
||||
// Make sure it is an even number:
|
||||
auto len = stack_.size() - start;
|
||||
assert(!(len & 1));
|
||||
FLATBUFFERS_ASSERT(!(len & 1));
|
||||
len /= 2;
|
||||
// Make sure keys are all strings:
|
||||
for (auto key = start; key < stack_.size(); key += 2) {
|
||||
assert(stack_[key].type_ == TYPE_KEY);
|
||||
FLATBUFFERS_ASSERT(stack_[key].type_ == TYPE_KEY);
|
||||
}
|
||||
// Now sort values, so later we can do a binary seach lookup.
|
||||
// We want to sort 2 array elements at a time.
|
||||
|
@ -1061,7 +1061,7 @@ class Builder FLATBUFFERS_FINAL_CLASS {
|
|||
// TODO: Have to check for pointer equality, as some sort
|
||||
// implementation apparently call this function with the same
|
||||
// element?? Why?
|
||||
assert(comp || &a == &b);
|
||||
FLATBUFFERS_ASSERT(comp || &a == &b);
|
||||
return comp < 0;
|
||||
});
|
||||
// First create a vector out of all keys.
|
||||
|
@ -1141,9 +1141,9 @@ class Builder FLATBUFFERS_FINAL_CLASS {
|
|||
template<typename T> size_t FixedTypedVector(const T *elems, size_t len) {
|
||||
// We only support a few fixed vector lengths. Anything bigger use a
|
||||
// regular typed vector.
|
||||
assert(len >= 2 && len <= 4);
|
||||
FLATBUFFERS_ASSERT(len >= 2 && len <= 4);
|
||||
// And only scalar values.
|
||||
assert(flatbuffers::is_scalar<T>::value);
|
||||
static_assert(flatbuffers::is_scalar<T>::value, "Unrelated types");
|
||||
return ScalarVector(elems, len, true);
|
||||
}
|
||||
|
||||
|
@ -1222,7 +1222,7 @@ class Builder FLATBUFFERS_FINAL_CLASS {
|
|||
// in a parent. You need to have exactly one root to finish a buffer.
|
||||
// Check your Start/End calls are matched, and all objects are inside
|
||||
// some other object.
|
||||
assert(stack_.size() == 1);
|
||||
FLATBUFFERS_ASSERT(stack_.size() == 1);
|
||||
|
||||
// Write root value.
|
||||
auto byte_width = Align(stack_[0].ElemWidth(buf_.size(), 0));
|
||||
|
@ -1240,7 +1240,7 @@ class Builder FLATBUFFERS_FINAL_CLASS {
|
|||
// If you get this assert, you're attempting to get access a buffer
|
||||
// which hasn't been finished yet. Be sure to call
|
||||
// Builder::Finish with your root object.
|
||||
assert(finished_);
|
||||
FLATBUFFERS_ASSERT(finished_);
|
||||
}
|
||||
|
||||
// Align to prepare for writing a scalar with a certain size.
|
||||
|
@ -1257,7 +1257,7 @@ class Builder FLATBUFFERS_FINAL_CLASS {
|
|||
}
|
||||
|
||||
template<typename T> void Write(T val, size_t byte_width) {
|
||||
assert(sizeof(T) >= byte_width);
|
||||
FLATBUFFERS_ASSERT(sizeof(T) >= byte_width);
|
||||
val = flatbuffers::EndianScalar(val);
|
||||
WriteBytes(&val, byte_width);
|
||||
}
|
||||
|
@ -1268,13 +1268,13 @@ class Builder FLATBUFFERS_FINAL_CLASS {
|
|||
case 4: Write(static_cast<float>(f), byte_width); break;
|
||||
// case 2: Write(static_cast<half>(f), byte_width); break;
|
||||
// case 1: Write(static_cast<quarter>(f), byte_width); break;
|
||||
default: assert(0);
|
||||
default: FLATBUFFERS_ASSERT(0);
|
||||
}
|
||||
}
|
||||
|
||||
void WriteOffset(uint64_t o, uint8_t byte_width) {
|
||||
auto reloff = buf_.size() - o;
|
||||
assert(byte_width == 8 || reloff < 1ULL << (byte_width * 8));
|
||||
FLATBUFFERS_ASSERT(byte_width == 8 || reloff < 1ULL << (byte_width * 8));
|
||||
Write(reloff, byte_width);
|
||||
}
|
||||
|
||||
|
@ -1291,12 +1291,12 @@ class Builder FLATBUFFERS_FINAL_CLASS {
|
|||
case 2: return BIT_WIDTH_16;
|
||||
case 4: return BIT_WIDTH_32;
|
||||
case 8: return BIT_WIDTH_64;
|
||||
default: assert(false); return BIT_WIDTH_64;
|
||||
default: FLATBUFFERS_ASSERT(false); return BIT_WIDTH_64;
|
||||
}
|
||||
}
|
||||
|
||||
template<typename T> static Type GetScalarType() {
|
||||
assert(flatbuffers::is_scalar<T>::value);
|
||||
static_assert(flatbuffers::is_scalar<T>::value, "Unrelated types");
|
||||
return flatbuffers::is_floating_point<T>::value
|
||||
? TYPE_FLOAT
|
||||
: flatbuffers::is_same<T, bool>::value
|
||||
|
@ -1360,7 +1360,7 @@ class Builder FLATBUFFERS_FINAL_CLASS {
|
|||
byte_width)
|
||||
return bit_width;
|
||||
}
|
||||
assert(false); // Must match one of the sizes above.
|
||||
FLATBUFFERS_ASSERT(false); // Must match one of the sizes above.
|
||||
return BIT_WIDTH_64;
|
||||
}
|
||||
}
|
||||
|
@ -1405,7 +1405,7 @@ class Builder FLATBUFFERS_FINAL_CLASS {
|
|||
// byte vector > 255 elements). For such types, write a "blob" instead.
|
||||
// TODO: instead of asserting, could write vector with larger elements
|
||||
// instead, though that would be wasteful.
|
||||
assert(WidthU(len) <= bit_width);
|
||||
FLATBUFFERS_ASSERT(WidthU(len) <= bit_width);
|
||||
if (!fixed) Write<uint64_t>(len, byte_width);
|
||||
auto vloc = buf_.size();
|
||||
for (size_t i = 0; i < len; i++) Write(elems[i], byte_width);
|
||||
|
@ -1437,13 +1437,13 @@ class Builder FLATBUFFERS_FINAL_CLASS {
|
|||
} else {
|
||||
// If you get this assert, you are writing a typed vector with
|
||||
// elements that are not all the same type.
|
||||
assert(vector_type == stack_[i].type_);
|
||||
FLATBUFFERS_ASSERT(vector_type == stack_[i].type_);
|
||||
}
|
||||
}
|
||||
}
|
||||
// If you get this assert, your fixed types are not one of:
|
||||
// Int / UInt / Float / Key.
|
||||
assert(IsTypedVectorElementType(vector_type));
|
||||
FLATBUFFERS_ASSERT(IsTypedVectorElementType(vector_type));
|
||||
auto byte_width = Align(bit_width);
|
||||
// Write vector. First the keys width/offset if available, and size.
|
||||
if (keys) {
|
||||
|
|
|
@ -91,14 +91,14 @@ class SliceAllocator : public Allocator {
|
|||
virtual ~SliceAllocator() { grpc_slice_unref(slice_); }
|
||||
|
||||
virtual uint8_t *allocate(size_t size) override {
|
||||
assert(GRPC_SLICE_IS_EMPTY(slice_));
|
||||
FLATBUFFERS_ASSERT(GRPC_SLICE_IS_EMPTY(slice_));
|
||||
slice_ = grpc_slice_malloc(size);
|
||||
return GRPC_SLICE_START_PTR(slice_);
|
||||
}
|
||||
|
||||
virtual void deallocate(uint8_t *p, size_t size) override {
|
||||
assert(p == GRPC_SLICE_START_PTR(slice_));
|
||||
assert(size == GRPC_SLICE_LENGTH(slice_));
|
||||
FLATBUFFERS_ASSERT(p == GRPC_SLICE_START_PTR(slice_));
|
||||
FLATBUFFERS_ASSERT(size == GRPC_SLICE_LENGTH(slice_));
|
||||
grpc_slice_unref(slice_);
|
||||
slice_ = grpc_empty_slice();
|
||||
}
|
||||
|
@ -106,9 +106,9 @@ class SliceAllocator : public Allocator {
|
|||
virtual uint8_t *reallocate_downward(uint8_t *old_p, size_t old_size,
|
||||
size_t new_size, size_t in_use_back,
|
||||
size_t in_use_front) override {
|
||||
assert(old_p == GRPC_SLICE_START_PTR(slice_));
|
||||
assert(old_size == GRPC_SLICE_LENGTH(slice_));
|
||||
assert(new_size > old_size);
|
||||
FLATBUFFERS_ASSERT(old_p == GRPC_SLICE_START_PTR(slice_));
|
||||
FLATBUFFERS_ASSERT(old_size == GRPC_SLICE_LENGTH(slice_));
|
||||
FLATBUFFERS_ASSERT(new_size > old_size);
|
||||
grpc_slice old_slice = slice_;
|
||||
grpc_slice new_slice = grpc_slice_malloc(new_size);
|
||||
uint8_t *new_p = GRPC_SLICE_START_PTR(new_slice);
|
||||
|
@ -121,8 +121,8 @@ class SliceAllocator : public Allocator {
|
|||
|
||||
private:
|
||||
grpc_slice &get_slice(uint8_t *p, size_t size) {
|
||||
assert(p == GRPC_SLICE_START_PTR(slice_));
|
||||
assert(size == GRPC_SLICE_LENGTH(slice_));
|
||||
FLATBUFFERS_ASSERT(p == GRPC_SLICE_START_PTR(slice_));
|
||||
FLATBUFFERS_ASSERT(size == GRPC_SLICE_LENGTH(slice_));
|
||||
return slice_;
|
||||
}
|
||||
|
||||
|
@ -162,10 +162,10 @@ class MessageBuilder : private detail::SliceAllocatorMember,
|
|||
auto msg_data = buf_.data(); // pointer to msg
|
||||
auto msg_size = buf_.size(); // size of msg
|
||||
// Do some sanity checks on data/size
|
||||
assert(msg_data);
|
||||
assert(msg_size);
|
||||
assert(msg_data >= buf_data);
|
||||
assert(msg_data + msg_size <= buf_data + buf_size);
|
||||
FLATBUFFERS_ASSERT(msg_data);
|
||||
FLATBUFFERS_ASSERT(msg_size);
|
||||
FLATBUFFERS_ASSERT(msg_data >= buf_data);
|
||||
FLATBUFFERS_ASSERT(msg_data + msg_size <= buf_data + buf_size);
|
||||
// Calculate offsets from the buffer start
|
||||
auto begin = msg_data - buf_data;
|
||||
auto end = begin + msg_size;
|
||||
|
|
|
@ -181,7 +181,7 @@ template<typename T> class SymbolTable {
|
|||
dict.erase(it);
|
||||
dict[newname] = obj;
|
||||
} else {
|
||||
assert(false);
|
||||
FLATBUFFERS_ASSERT(false);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -485,7 +485,7 @@ class CheckedError {
|
|||
*this = other; // Use assignment operator.
|
||||
}
|
||||
|
||||
~CheckedError() { assert(has_been_checked_); }
|
||||
~CheckedError() { FLATBUFFERS_ASSERT(has_been_checked_); }
|
||||
|
||||
bool Check() {
|
||||
has_been_checked_ = true;
|
||||
|
|
|
@ -89,9 +89,9 @@ inline size_t InlineSize(ElementaryType type, const TypeTable *type_table) {
|
|||
case ST_TABLE:
|
||||
case ST_UNION: return 4;
|
||||
case ST_STRUCT: return type_table->values[type_table->num_elems];
|
||||
default: assert(false); return 1;
|
||||
default: FLATBUFFERS_ASSERT(false); return 1;
|
||||
}
|
||||
default: assert(false); return 1;
|
||||
default: FLATBUFFERS_ASSERT(false); return 1;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -190,7 +190,7 @@ inline void IterateValue(ElementaryType type, const uint8_t *val,
|
|||
case ST_STRUCT: IterateObject(val, type_table, visitor); break;
|
||||
case ST_UNION: {
|
||||
val += ReadScalar<uoffset_t>(val);
|
||||
assert(prev_val);
|
||||
FLATBUFFERS_ASSERT(prev_val);
|
||||
auto union_type = *prev_val; // Always a uint8_t.
|
||||
if (vector_index >= 0) {
|
||||
auto type_vec = reinterpret_cast<const Vector<uint8_t> *>(prev_val);
|
||||
|
@ -217,7 +217,7 @@ inline void IterateValue(ElementaryType type, const uint8_t *val,
|
|||
}
|
||||
break;
|
||||
}
|
||||
case ST_ENUM: assert(false); break;
|
||||
case ST_ENUM: FLATBUFFERS_ASSERT(false); break;
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
|
|
@ -72,20 +72,20 @@ inline const Table *GetAnyRoot(const uint8_t *flatbuf) {
|
|||
|
||||
// Get a field's default, if you know it's an integer, and its exact type.
|
||||
template<typename T> T GetFieldDefaultI(const reflection::Field &field) {
|
||||
assert(sizeof(T) == GetTypeSize(field.type()->base_type()));
|
||||
FLATBUFFERS_ASSERT(sizeof(T) == GetTypeSize(field.type()->base_type()));
|
||||
return static_cast<T>(field.default_integer());
|
||||
}
|
||||
|
||||
// Get a field's default, if you know it's floating point and its exact type.
|
||||
template<typename T> T GetFieldDefaultF(const reflection::Field &field) {
|
||||
assert(sizeof(T) == GetTypeSize(field.type()->base_type()));
|
||||
FLATBUFFERS_ASSERT(sizeof(T) == GetTypeSize(field.type()->base_type()));
|
||||
return static_cast<T>(field.default_real());
|
||||
}
|
||||
|
||||
// Get a field, if you know it's an integer, and its exact type.
|
||||
template<typename T>
|
||||
T GetFieldI(const Table &table, const reflection::Field &field) {
|
||||
assert(sizeof(T) == GetTypeSize(field.type()->base_type()));
|
||||
FLATBUFFERS_ASSERT(sizeof(T) == GetTypeSize(field.type()->base_type()));
|
||||
return table.GetField<T>(field.offset(),
|
||||
static_cast<T>(field.default_integer()));
|
||||
}
|
||||
|
@ -93,7 +93,7 @@ T GetFieldI(const Table &table, const reflection::Field &field) {
|
|||
// Get a field, if you know it's floating point and its exact type.
|
||||
template<typename T>
|
||||
T GetFieldF(const Table &table, const reflection::Field &field) {
|
||||
assert(sizeof(T) == GetTypeSize(field.type()->base_type()));
|
||||
FLATBUFFERS_ASSERT(sizeof(T) == GetTypeSize(field.type()->base_type()));
|
||||
return table.GetField<T>(field.offset(),
|
||||
static_cast<T>(field.default_real()));
|
||||
}
|
||||
|
@ -101,15 +101,15 @@ T GetFieldF(const Table &table, const reflection::Field &field) {
|
|||
// Get a field, if you know it's a string.
|
||||
inline const String *GetFieldS(const Table &table,
|
||||
const reflection::Field &field) {
|
||||
assert(field.type()->base_type() == reflection::String);
|
||||
FLATBUFFERS_ASSERT(field.type()->base_type() == reflection::String);
|
||||
return table.GetPointer<const String *>(field.offset());
|
||||
}
|
||||
|
||||
// Get a field, if you know it's a vector.
|
||||
template<typename T>
|
||||
Vector<T> *GetFieldV(const Table &table, const reflection::Field &field) {
|
||||
assert(field.type()->base_type() == reflection::Vector &&
|
||||
sizeof(T) == GetTypeSize(field.type()->element()));
|
||||
FLATBUFFERS_ASSERT(field.type()->base_type() == reflection::Vector &&
|
||||
sizeof(T) == GetTypeSize(field.type()->element()));
|
||||
return table.GetPointer<Vector<T> *>(field.offset());
|
||||
}
|
||||
|
||||
|
@ -123,8 +123,8 @@ inline VectorOfAny *GetFieldAnyV(const Table &table,
|
|||
|
||||
// Get a field, if you know it's a table.
|
||||
inline Table *GetFieldT(const Table &table, const reflection::Field &field) {
|
||||
assert(field.type()->base_type() == reflection::Obj ||
|
||||
field.type()->base_type() == reflection::Union);
|
||||
FLATBUFFERS_ASSERT(field.type()->base_type() == reflection::Obj ||
|
||||
field.type()->base_type() == reflection::Union);
|
||||
return table.GetPointer<Table *>(field.offset());
|
||||
}
|
||||
|
||||
|
@ -133,14 +133,14 @@ inline const Struct *GetFieldStruct(const Table &table,
|
|||
const reflection::Field &field) {
|
||||
// TODO: This does NOT check if the field is a table or struct, but we'd need
|
||||
// access to the schema to check the is_struct flag.
|
||||
assert(field.type()->base_type() == reflection::Obj);
|
||||
FLATBUFFERS_ASSERT(field.type()->base_type() == reflection::Obj);
|
||||
return table.GetStruct<const Struct *>(field.offset());
|
||||
}
|
||||
|
||||
// Get a structure's field, if you know it's a struct.
|
||||
inline const Struct *GetFieldStruct(const Struct &structure,
|
||||
const reflection::Field &field) {
|
||||
assert(field.type()->base_type() == reflection::Obj);
|
||||
FLATBUFFERS_ASSERT(field.type()->base_type() == reflection::Obj);
|
||||
return structure.GetStruct<const Struct *>(field.offset());
|
||||
}
|
||||
|
||||
|
@ -262,12 +262,12 @@ template<typename T>
|
|||
bool SetField(Table *table, const reflection::Field &field, T val) {
|
||||
reflection::BaseType type = field.type()->base_type();
|
||||
if (!IsScalar(type)) { return false; }
|
||||
assert(sizeof(T) == GetTypeSize(type));
|
||||
FLATBUFFERS_ASSERT(sizeof(T) == GetTypeSize(type));
|
||||
T def;
|
||||
if (IsInteger(type)) {
|
||||
def = GetFieldDefaultI<T>(field);
|
||||
} else {
|
||||
assert(IsFloat(type));
|
||||
FLATBUFFERS_ASSERT(IsFloat(type));
|
||||
def = GetFieldDefaultF<T>(field);
|
||||
}
|
||||
return table->SetField(field.offset(), val, def);
|
||||
|
@ -386,7 +386,7 @@ inline const reflection::Object &GetUnionType(
|
|||
// TODO: this is clumsy and slow, but no other way to find it?
|
||||
auto type_field = parent.fields()->LookupByKey(
|
||||
(unionfield.name()->str() + UnionTypeFieldSuffix()).c_str());
|
||||
assert(type_field);
|
||||
FLATBUFFERS_ASSERT(type_field);
|
||||
auto union_type = GetFieldI<uint8_t>(table, *type_field);
|
||||
auto enumval = enumdef->values()->LookupByKey(union_type);
|
||||
return *enumval->object();
|
||||
|
@ -444,7 +444,8 @@ const uint8_t *AddFlatBuffer(std::vector<uint8_t> &flatbuf,
|
|||
|
||||
inline bool SetFieldT(Table *table, const reflection::Field &field,
|
||||
const uint8_t *val) {
|
||||
assert(sizeof(uoffset_t) == GetTypeSize(field.type()->base_type()));
|
||||
FLATBUFFERS_ASSERT(sizeof(uoffset_t) ==
|
||||
GetTypeSize(field.type()->base_type()));
|
||||
return table->SetPointer(field.offset(), val);
|
||||
}
|
||||
|
||||
|
|
|
@@ -284,7 +284,7 @@ inline std::string AbsolutePath(const std::string &filepath) {
 // Convert a unicode code point into a UTF-8 representation by appending it
 // to a string. Returns the number of bytes generated.
 inline int ToUTF8(uint32_t ucc, std::string *out) {
-  assert(!(ucc & 0x80000000)); // Top bit can't be set.
+  FLATBUFFERS_ASSERT(!(ucc & 0x80000000)); // Top bit can't be set.
   // 6 possible encodings: http://en.wikipedia.org/wiki/UTF-8
   for (int i = 0; i < 6; i++) {
     // Max bits this encoding can represent.
@@ -302,7 +302,7 @@ inline int ToUTF8(uint32_t ucc, std::string *out) {
       return i + 1; // Return the number of bytes added.
     }
   }
-  assert(0); // Impossible to arrive here.
+  FLATBUFFERS_ASSERT(0); // Impossible to arrive here.
   return -1;
 }

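For reference, ToUTF8 appends the UTF-8 encoding of a code point to the string and returns the number of bytes written, so the converted check only fires for invalid input (top bit set). A quick usage sketch, assuming flatbuffers/util.h from this version is on the include path:

#include <string>
#include "flatbuffers/util.h"

int main() {
  std::string out;
  // U+20AC (EURO SIGN) encodes to three bytes: E2 82 AC.
  int n = flatbuffers::ToUTF8(0x20AC, &out);
  // flatbuffers::ToUTF8(0x80000000u, &out);  // would trip FLATBUFFERS_ASSERT
  return (n == 3 && out.size() == 3) ? 0 : 1;
}
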
|
|
@ -47,7 +47,7 @@ void CodeWriter::operator+=(std::string text) {
|
|||
const std::string &value = iter->second;
|
||||
stream_ << value;
|
||||
} else {
|
||||
assert(false && "could not find key");
|
||||
FLATBUFFERS_ASSERT(false && "could not find key");
|
||||
stream_ << key;
|
||||
}
|
||||
|
||||
|
|
|
@ -216,7 +216,7 @@ class CppGenerator : public BaseGenerator {
|
|||
|
||||
if (parser_.opts.include_dependence_headers) { GenIncludeDependencies(); }
|
||||
|
||||
assert(!cur_name_space_);
|
||||
FLATBUFFERS_ASSERT(!cur_name_space_);
|
||||
|
||||
// Generate forward declarations for all structs/tables, since they may
|
||||
// have circular references.
|
||||
|
@ -667,7 +667,7 @@ class CppGenerator : public BaseGenerator {
|
|||
return actual_type ? (native_type ? "std::string" : "flatbuffers::String")
|
||||
: Name(ev);
|
||||
} else {
|
||||
assert(false);
|
||||
FLATBUFFERS_ASSERT(false);
|
||||
return Name(ev);
|
||||
}
|
||||
}
|
||||
|
@ -897,7 +897,7 @@ class CppGenerator : public BaseGenerator {
|
|||
}
|
||||
|
||||
if (parser_.opts.scoped_enums || parser_.opts.prefixed_enums) {
|
||||
assert(minv && maxv);
|
||||
FLATBUFFERS_ASSERT(minv && maxv);
|
||||
|
||||
code_.SetValue("SEP", ",\n");
|
||||
if (enum_def.attributes.Lookup("bit_flags")) {
|
||||
|
@ -1118,7 +1118,7 @@ class CppGenerator : public BaseGenerator {
|
|||
code_ += getptr;
|
||||
code_ += " return verifier.Verify(ptr);";
|
||||
} else {
|
||||
assert(false);
|
||||
FLATBUFFERS_ASSERT(false);
|
||||
}
|
||||
code_ += " }";
|
||||
} else {
|
||||
|
@ -1169,7 +1169,7 @@ class CppGenerator : public BaseGenerator {
|
|||
} else if (ev.union_type.base_type == BASE_TYPE_STRING) {
|
||||
code_ += " return new std::string(ptr->c_str(), ptr->size());";
|
||||
} else {
|
||||
assert(false);
|
||||
FLATBUFFERS_ASSERT(false);
|
||||
}
|
||||
code_ += " }";
|
||||
}
|
||||
|
@ -1202,7 +1202,7 @@ class CppGenerator : public BaseGenerator {
|
|||
} else if (ev.union_type.base_type == BASE_TYPE_STRING) {
|
||||
code_ += " return _fbb.CreateString(*ptr).Union();";
|
||||
} else {
|
||||
assert(false);
|
||||
FLATBUFFERS_ASSERT(false);
|
||||
}
|
||||
code_ += " }";
|
||||
}
|
||||
|
@ -1244,7 +1244,7 @@ class CppGenerator : public BaseGenerator {
|
|||
" value = new {{TYPE}}(*reinterpret_cast<{{TYPE}} *>"
|
||||
"(u.value));";
|
||||
} else {
|
||||
code_ += " assert(false); // {{TYPE}} not copyable.";
|
||||
code_ += " FLATBUFFERS_ASSERT(false); // {{TYPE}} not copyable.";
|
||||
}
|
||||
code_ += " break;";
|
||||
code_ += " }";
|
||||
|
@ -1697,7 +1697,7 @@ class CppGenerator : public BaseGenerator {
|
|||
nested->constant);
|
||||
nested_root = parser_.LookupStruct(qualified_name);
|
||||
}
|
||||
assert(nested_root); // Guaranteed to exist by parser.
|
||||
FLATBUFFERS_ASSERT(nested_root); // Guaranteed to exist by parser.
|
||||
(void)nested_root;
|
||||
code_.SetValue("CPP_NAME", TranslateNameSpace(qualified_name));
|
||||
|
||||
|
@ -2092,7 +2092,7 @@ class CppGenerator : public BaseGenerator {
|
|||
break;
|
||||
}
|
||||
case BASE_TYPE_UTYPE: {
|
||||
assert(union_field->value.type.base_type == BASE_TYPE_UNION);
|
||||
FLATBUFFERS_ASSERT(union_field->value.type.base_type == BASE_TYPE_UNION);
|
||||
// Generate code that sets the union type, of the form:
|
||||
// _o->field.type = _e;
|
||||
code += "_o->" + union_field->name + ".type = _e;";
|
||||
|
@ -2403,7 +2403,7 @@ class CppGenerator : public BaseGenerator {
|
|||
f((1 << i) * 8, code_ptr, id);
|
||||
}
|
||||
}
|
||||
assert(!(field.padding & ~0xF));
|
||||
FLATBUFFERS_ASSERT(!(field.padding & ~0xF));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -131,7 +131,7 @@ const LanguageParameters &GetLangParams(IDLOptions::Language lang) {
|
|||
if (lang == IDLOptions::kJava) {
|
||||
return language_parameters[0];
|
||||
} else {
|
||||
assert(lang == IDLOptions::kCSharp);
|
||||
FLATBUFFERS_ASSERT(lang == IDLOptions::kCSharp);
|
||||
return language_parameters[1];
|
||||
}
|
||||
}
|
||||
|
@ -274,7 +274,7 @@ class GeneralGenerator : public BaseGenerator {
|
|||
if (lang_.language == IDLOptions::kJava) {
|
||||
return java_typename[type.base_type];
|
||||
} else {
|
||||
assert(lang_.language == IDLOptions::kCSharp);
|
||||
FLATBUFFERS_ASSERT(lang_.language == IDLOptions::kCSharp);
|
||||
return csharp_typename[type.base_type];
|
||||
}
|
||||
}
|
||||
|
@ -999,7 +999,7 @@ class GeneralGenerator : public BaseGenerator {
|
|||
code += "(obj, o) : null";
|
||||
}
|
||||
break;
|
||||
default: assert(0);
|
||||
default: FLATBUFFERS_ASSERT(0);
|
||||
}
|
||||
}
|
||||
code += member_suffix;
|
||||
|
@ -1427,7 +1427,7 @@ bool GenerateGeneral(const Parser &parser, const std::string &path,
|
|||
|
||||
std::string GeneralMakeRule(const Parser &parser, const std::string &path,
|
||||
const std::string &file_name) {
|
||||
assert(parser.opts.lang <= IDLOptions::kMAX);
|
||||
FLATBUFFERS_ASSERT(parser.opts.lang <= IDLOptions::kMAX);
|
||||
const auto &lang = GetLangParams(parser.opts.lang);
|
||||
|
||||
std::string make_rule;
|
||||
|
|
|
@ -522,7 +522,7 @@ static void GenStructAccessor(const StructDef &struct_def,
|
|||
break;
|
||||
}
|
||||
case BASE_TYPE_UNION: GetUnionField(struct_def, field, code_ptr); break;
|
||||
default: assert(0);
|
||||
default: FLATBUFFERS_ASSERT(0);
|
||||
}
|
||||
}
|
||||
if (field.value.type.base_type == BASE_TYPE_VECTOR) {
|
||||
|
|
|
@ -160,7 +160,7 @@ class FlatBufPrinter : public grpc_generator::Printer {
|
|||
void Indent() { indent_++; }
|
||||
void Outdent() {
|
||||
indent_--;
|
||||
assert(indent_ >= 0);
|
||||
FLATBUFFERS_ASSERT(indent_ >= 0);
|
||||
}
|
||||
|
||||
private:
|
||||
|
|
|
@ -54,7 +54,7 @@ const JsLanguageParameters &GetJsLangParams(IDLOptions::Language lang) {
|
|||
if (lang == IDLOptions::kJs) {
|
||||
return js_language_parameters[0];
|
||||
} else {
|
||||
assert(lang == IDLOptions::kTs);
|
||||
FLATBUFFERS_ASSERT(lang == IDLOptions::kTs);
|
||||
return js_language_parameters[1];
|
||||
}
|
||||
}
|
||||
|
@ -870,7 +870,7 @@ class JsGenerator : public BaseGenerator {
|
|||
" : null;\n";
|
||||
break;
|
||||
|
||||
default: assert(0);
|
||||
default: FLATBUFFERS_ASSERT(0);
|
||||
}
|
||||
}
|
||||
code += "};\n\n";
|
||||
|
@ -1181,7 +1181,7 @@ bool GenerateJS(const Parser &parser, const std::string &path,
|
|||
|
||||
std::string JSMakeRule(const Parser &parser, const std::string &path,
|
||||
const std::string &file_name) {
|
||||
assert(parser.opts.lang <= IDLOptions::kMAX);
|
||||
FLATBUFFERS_ASSERT(parser.opts.lang <= IDLOptions::kMAX);
|
||||
const auto &lang = GetJsLangParams(parser.opts.lang);
|
||||
|
||||
std::string filebase =
|
||||
|
|
|
@ -702,7 +702,7 @@ class PhpGenerator : public BaseGenerator {
|
|||
break;
|
||||
}
|
||||
case BASE_TYPE_UNION: GetUnionField(field, code_ptr); break;
|
||||
default: assert(0);
|
||||
default: FLATBUFFERS_ASSERT(0);
|
||||
}
|
||||
}
|
||||
if (field.value.type.base_type == BASE_TYPE_VECTOR) {
|
||||
|
|
|
@ -459,7 +459,7 @@ static void GenStructAccessor(const StructDef &struct_def,
|
|||
break;
|
||||
}
|
||||
case BASE_TYPE_UNION: GetUnionField(struct_def, field, code_ptr); break;
|
||||
default: assert(0);
|
||||
default: FLATBUFFERS_ASSERT(0);
|
||||
}
|
||||
}
|
||||
if (field.value.type.base_type == BASE_TYPE_VECTOR) {
|
||||
|
|
|
@ -108,7 +108,7 @@ bool Print<const void *>(const void *val, Type type, int indent,
|
|||
case BASE_TYPE_UNION:
|
||||
// If this assert hits, you have an corrupt buffer, a union type field
|
||||
// was not present or was out of range.
|
||||
assert(union_type);
|
||||
FLATBUFFERS_ASSERT(union_type);
|
||||
return Print<const void *>(val, *union_type, indent, nullptr, opts,
|
||||
_text);
|
||||
case BASE_TYPE_STRUCT:
|
||||
|
@ -143,7 +143,7 @@ bool Print<const void *>(const void *val, Type type, int indent,
|
|||
// clang-format on
|
||||
}
|
||||
break;
|
||||
default: assert(0);
|
||||
default: FLATBUFFERS_ASSERT(0);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
@ -173,7 +173,7 @@ static bool GenFieldOffset(const FieldDef &fd, const Table *table, bool fixed,
|
|||
const void *val = nullptr;
|
||||
if (fixed) {
|
||||
// The only non-scalar fields in structs are structs.
|
||||
assert(IsStruct(fd.value.type));
|
||||
FLATBUFFERS_ASSERT(IsStruct(fd.value.type));
|
||||
val = reinterpret_cast<const Struct *>(table)->GetStruct<const void *>(
|
||||
fd.value.offset);
|
||||
} else if (fd.flexbuffer) {
|
||||
|
@ -261,7 +261,7 @@ static bool GenStruct(const StructDef &struct_def, const Table *table,
|
|||
bool GenerateText(const Parser &parser, const void *flatbuffer,
|
||||
std::string *_text) {
|
||||
std::string &text = *_text;
|
||||
assert(parser.root_struct_def_); // call SetRootType()
|
||||
FLATBUFFERS_ASSERT(parser.root_struct_def_); // call SetRootType()
|
||||
text.reserve(1024); // Reduce amount of inevitable reallocs.
|
||||
auto root = parser.opts.size_prefixed ?
|
||||
GetSizePrefixedRoot<Table>(flatbuffer) : GetRoot<Table>(flatbuffer);
|
||||
|
|
|
@ -826,7 +826,7 @@ CheckedError Parser::ParseAnyValue(Value &val, FieldDef *field,
|
|||
const StructDef *parent_struct_def) {
|
||||
switch (val.type.base_type) {
|
||||
case BASE_TYPE_UNION: {
|
||||
assert(field);
|
||||
FLATBUFFERS_ASSERT(field);
|
||||
std::string constant;
|
||||
// Find corresponding type field we may have already parsed.
|
||||
for (auto elem = field_stack_.rbegin();
|
||||
|
@ -843,9 +843,9 @@ CheckedError Parser::ParseAnyValue(Value &val, FieldDef *field,
|
|||
// output these in alphabetical order, meaning it comes after this
|
||||
// value. So we scan past the value to find it, then come back here.
|
||||
auto type_name = field->name + UnionTypeFieldSuffix();
|
||||
assert(parent_struct_def);
|
||||
FLATBUFFERS_ASSERT(parent_struct_def);
|
||||
auto type_field = parent_struct_def->fields.Lookup(type_name);
|
||||
assert(type_field); // Guaranteed by ParseField().
|
||||
FLATBUFFERS_ASSERT(type_field); // Guaranteed by ParseField().
|
||||
// Remember where we are in the source file, so we can come back here.
|
||||
auto backup = *static_cast<ParserState *>(this);
|
||||
ECHECK(SkipAnyJsonValue()); // The table.
|
||||
|
@ -882,7 +882,7 @@ CheckedError Parser::ParseAnyValue(Value &val, FieldDef *field,
|
|||
} else if (enum_val->union_type.base_type == BASE_TYPE_STRING) {
|
||||
ECHECK(ParseString(val));
|
||||
} else {
|
||||
assert(false);
|
||||
FLATBUFFERS_ASSERT(false);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
@ -917,7 +917,7 @@ CheckedError Parser::ParseAnyValue(Value &val, FieldDef *field,
|
|||
}
|
||||
|
||||
void Parser::SerializeStruct(const StructDef &struct_def, const Value &val) {
|
||||
assert(val.constant.length() == struct_def.bytesize);
|
||||
FLATBUFFERS_ASSERT(val.constant.length() == struct_def.bytesize);
|
||||
builder_.Align(struct_def.minalign);
|
||||
builder_.PushBytes(reinterpret_cast<const uint8_t *>(val.constant.c_str()),
|
||||
struct_def.bytesize);
|
||||
|
@ -1105,14 +1105,14 @@ CheckedError Parser::ParseTable(const StructDef &struct_def, std::string *value,
|
|||
if (struct_def.fixed) {
|
||||
builder_.ClearOffsets();
|
||||
builder_.EndStruct();
|
||||
assert(value);
|
||||
FLATBUFFERS_ASSERT(value);
|
||||
// Temporarily store this struct in the value string, since it is to
|
||||
// be serialized in-place elsewhere.
|
||||
value->assign(
|
||||
reinterpret_cast<const char *>(builder_.GetCurrentBufferPointer()),
|
||||
struct_def.bytesize);
|
||||
builder_.PopBytes(struct_def.bytesize);
|
||||
assert(!ovalue);
|
||||
FLATBUFFERS_ASSERT(!ovalue);
|
||||
} else {
|
||||
auto val = builder_.EndTable(start);
|
||||
if (ovalue) *ovalue = val;
|
||||
|
@ -1195,7 +1195,7 @@ CheckedError Parser::ParseNestedFlatbuffer(Value &val, FieldDef *field,
|
|||
|
||||
// Create and initialize new parser
|
||||
Parser nested_parser;
|
||||
assert(field->nested_flatbuffer);
|
||||
FLATBUFFERS_ASSERT(field->nested_flatbuffer);
|
||||
nested_parser.root_struct_def_ = field->nested_flatbuffer;
|
||||
nested_parser.enums_ = enums_;
|
||||
nested_parser.opts = opts;
|
||||
|
@ -1307,7 +1307,7 @@ CheckedError Parser::ParseEnumFromString(Type &type, int64_t *result) {
|
|||
}
|
||||
|
||||
CheckedError Parser::ParseHash(Value &e, FieldDef *field) {
|
||||
assert(field);
|
||||
FLATBUFFERS_ASSERT(field);
|
||||
Value *hash_name = field->attributes.Lookup("hash");
|
||||
switch (e.type.base_type) {
|
||||
case BASE_TYPE_SHORT: {
|
||||
|
@ -1346,7 +1346,7 @@ CheckedError Parser::ParseHash(Value &e, FieldDef *field) {
|
|||
e.constant = NumToString(hashed_value);
|
||||
break;
|
||||
}
|
||||
default: assert(0);
|
||||
default: FLATBUFFERS_ASSERT(0);
|
||||
}
|
||||
NEXT();
|
||||
return NoError();
|
||||
|
@ -1401,7 +1401,7 @@ CheckedError Parser::ParseSingleValue(const std::string *name, Value &e) {
|
|||
e.constant = NumToString(strtod(attribute_.c_str(), &end));
|
||||
if (*end) return Error("invalid float: " + attribute_);
|
||||
} else {
|
||||
assert(0); // Shouldn't happen, we covered all types.
|
||||
FLATBUFFERS_ASSERT(0); // Shouldn't happen, we covered all types.
|
||||
e.constant = "0";
|
||||
}
|
||||
NEXT();
|
||||
|
@ -2279,7 +2279,7 @@ CheckedError Parser::ParseRoot(const char *source, const char **include_paths,
|
|||
auto &bt = field.value.type.base_type == BASE_TYPE_VECTOR
|
||||
? field.value.type.element
|
||||
: field.value.type.base_type;
|
||||
assert(bt == BASE_TYPE_STRUCT);
|
||||
FLATBUFFERS_ASSERT(bt == BASE_TYPE_STRUCT);
|
||||
bt = enum_def->underlying_type.base_type;
|
||||
struct_def.refcount--;
|
||||
enum_def->refcount++;
|
||||
|
@ -2613,7 +2613,7 @@ Definition::SerializeAttributes(FlatBufferBuilder *builder,
|
|||
std::vector<flatbuffers::Offset<reflection::KeyValue>> attrs;
|
||||
for (auto kv = attributes.dict.begin(); kv != attributes.dict.end(); ++kv) {
|
||||
auto it = parser.known_attributes_.find(kv->first);
|
||||
assert(it != parser.known_attributes_.end());
|
||||
FLATBUFFERS_ASSERT(it != parser.known_attributes_.end());
|
||||
if (!it->second) { // Custom attribute.
|
||||
attrs.push_back(reflection::CreateKeyValue(
|
||||
*builder, builder->CreateString(kv->first),
|
||||
|
|
|
@ -275,7 +275,7 @@ class ResizeContext {
|
|||
break;
|
||||
}
|
||||
case reflection::String: break;
|
||||
default: assert(false);
|
||||
default: FLATBUFFERS_ASSERT(false);
|
||||
}
|
||||
}
|
||||
// Check if the vtable offset points beyond the insertion point.
|
||||
|
@ -478,7 +478,7 @@ Offset<const Table *> CopyTable(FlatBufferBuilder &fbb,
|
|||
}
|
||||
}
|
||||
}
|
||||
assert(offset_idx == offsets.size());
|
||||
FLATBUFFERS_ASSERT(offset_idx == offsets.size());
|
||||
if (objectdef.is_struct()) {
|
||||
fbb.ClearOffsets();
|
||||
return fbb.EndStruct();
|
||||
|
@ -518,11 +518,11 @@ bool VerifyObject(flatbuffers::Verifier &v, const reflection::Schema &schema,
|
|||
bool VerifyVector(flatbuffers::Verifier &v, const reflection::Schema &schema,
|
||||
const flatbuffers::Table &table,
|
||||
const reflection::Field &vec_field) {
|
||||
assert(vec_field.type()->base_type() == reflection::Vector);
|
||||
FLATBUFFERS_ASSERT(vec_field.type()->base_type() == reflection::Vector);
|
||||
if (!table.VerifyField<uoffset_t>(v, vec_field.offset())) return false;
|
||||
|
||||
switch (vec_field.type()->element()) {
|
||||
case reflection::None: assert(false); break;
|
||||
case reflection::None: FLATBUFFERS_ASSERT(false); break;
|
||||
case reflection::UType:
|
||||
return v.Verify(flatbuffers::GetFieldV<uint8_t>(table, vec_field));
|
||||
case reflection::Bool:
|
||||
|
@ -552,7 +552,7 @@ bool VerifyVector(flatbuffers::Verifier &v, const reflection::Schema &schema,
|
|||
return false;
|
||||
}
|
||||
}
|
||||
case reflection::Vector: assert(false); break;
|
||||
case reflection::Vector: FLATBUFFERS_ASSERT(false); break;
|
||||
case reflection::Obj: {
|
||||
auto obj = schema.objects()->Get(vec_field.type()->index());
|
||||
if (obj->is_struct()) {
|
||||
|
@ -575,8 +575,8 @@ bool VerifyVector(flatbuffers::Verifier &v, const reflection::Schema &schema,
|
|||
}
|
||||
return true;
|
||||
}
|
||||
case reflection::Union: assert(false); break;
|
||||
default: assert(false); break;
|
||||
case reflection::Union: FLATBUFFERS_ASSERT(false); break;
|
||||
default: FLATBUFFERS_ASSERT(false); break;
|
||||
}
|
||||
|
||||
return false;
|
||||
|
@ -597,7 +597,7 @@ bool VerifyObject(flatbuffers::Verifier &v, const reflection::Schema &schema,
|
|||
for (uoffset_t i = 0; i < obj.fields()->size(); i++) {
|
||||
auto field_def = obj.fields()->Get(i);
|
||||
switch (field_def->type()->base_type()) {
|
||||
case reflection::None: assert(false); break;
|
||||
case reflection::None: FLATBUFFERS_ASSERT(false); break;
|
||||
case reflection::UType:
|
||||
if (!table->VerifyField<uint8_t>(v, field_def->offset())) return false;
|
||||
break;
|
||||
|
@ -665,7 +665,7 @@ bool VerifyObject(flatbuffers::Verifier &v, const reflection::Schema &schema,
|
|||
}
|
||||
break;
|
||||
}
|
||||
default: assert(false); break;
|
||||
default: FLATBUFFERS_ASSERT(false); break;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -47,12 +47,12 @@ static LoadFileFunction g_load_file_function = LoadFileRaw;
|
|||
static FileExistsFunction g_file_exists_function = FileExistsRaw;
|
||||
|
||||
bool LoadFile(const char *name, bool binary, std::string *buf) {
|
||||
assert(g_load_file_function);
|
||||
FLATBUFFERS_ASSERT(g_load_file_function);
|
||||
return g_load_file_function(name, binary, buf);
|
||||
}
|
||||
|
||||
bool FileExists(const char *name) {
|
||||
assert(g_file_exists_function);
|
||||
FLATBUFFERS_ASSERT(g_file_exists_function);
|
||||
return g_file_exists_function(name);
|
||||
}
|
||||
|
||||
|
|
|
@@ -1,4 +1,4 @@
-//Generated by flatc compiler (version 1.8.0)
+//Generated by flatc compiler (version 1.9.0)
 //If you make any local changes, they will be lost
 //source: monster_test.fbs

@@ -2168,7 +2168,7 @@ inline flatbuffers::Offset<void> AnyUnion::Pack(flatbuffers::FlatBufferBuilder &
 inline AnyUnion::AnyUnion(const AnyUnion &u) FLATBUFFERS_NOEXCEPT : type(u.type), value(nullptr) {
   switch (type) {
     case Any_Monster: {
-      assert(false); // MonsterT not copyable.
+      FLATBUFFERS_ASSERT(false); // MonsterT not copyable.
       break;
     }
     case Any_TestSimpleTableWithEnum: {

@ -1,6 +1,6 @@
|
|||
// automatically generated by the FlatBuffers compiler, do not modify
|
||||
|
||||
import * as NS9459827973991502386 from "./namespace_test1_generated";
|
||||
import * as NS11563891686210618450 from "./namespace_test1_generated";
|
||||
/**
|
||||
* @constructor
|
||||
*/
|
||||
|
@ -39,24 +39,24 @@ static getRootAsTableInFirstNS(bb:flatbuffers.ByteBuffer, obj?:TableInFirstNS):T
|
|||
* @param {NamespaceA.NamespaceB.TableInNestedNS=} obj
|
||||
* @returns {NamespaceA.NamespaceB.TableInNestedNS|null}
|
||||
*/
|
||||
fooTable(obj?:NS9459827973991502386.NamespaceA.NamespaceB.TableInNestedNS):NS9459827973991502386.NamespaceA.NamespaceB.TableInNestedNS|null {
|
||||
fooTable(obj?:NS11563891686210618450.NamespaceA.NamespaceB.TableInNestedNS):NS11563891686210618450.NamespaceA.NamespaceB.TableInNestedNS|null {
|
||||
var offset = this.bb!.__offset(this.bb_pos, 4);
|
||||
return offset ? (obj || new NS9459827973991502386.NamespaceA.NamespaceB.TableInNestedNS).__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) : null;
|
||||
return offset ? (obj || new NS11563891686210618450.NamespaceA.NamespaceB.TableInNestedNS).__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) : null;
|
||||
};
|
||||
|
||||
/**
|
||||
* @returns {NamespaceA.NamespaceB.EnumInNestedNS}
|
||||
*/
|
||||
fooEnum():NS9459827973991502386.NamespaceA.NamespaceB.EnumInNestedNS {
|
||||
fooEnum():NS11563891686210618450.NamespaceA.NamespaceB.EnumInNestedNS {
|
||||
var offset = this.bb!.__offset(this.bb_pos, 6);
|
||||
return offset ? /** @type {NamespaceA.NamespaceB.EnumInNestedNS} */ (this.bb!.readInt8(this.bb_pos + offset)) : NS9459827973991502386.NamespaceA.NamespaceB.EnumInNestedNS.A;
|
||||
return offset ? /** @type {NamespaceA.NamespaceB.EnumInNestedNS} */ (this.bb!.readInt8(this.bb_pos + offset)) : NS11563891686210618450.NamespaceA.NamespaceB.EnumInNestedNS.A;
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {NamespaceA.NamespaceB.EnumInNestedNS} value
|
||||
* @returns {boolean}
|
||||
*/
|
||||
mutate_foo_enum(value:NS9459827973991502386.NamespaceA.NamespaceB.EnumInNestedNS):boolean {
|
||||
mutate_foo_enum(value:NS11563891686210618450.NamespaceA.NamespaceB.EnumInNestedNS):boolean {
|
||||
var offset = this.bb!.__offset(this.bb_pos, 6);
|
||||
|
||||
if (offset === 0) {
|
||||
|
@ -71,9 +71,9 @@ mutate_foo_enum(value:NS9459827973991502386.NamespaceA.NamespaceB.EnumInNestedNS
|
|||
* @param {NamespaceA.NamespaceB.StructInNestedNS=} obj
|
||||
* @returns {NamespaceA.NamespaceB.StructInNestedNS|null}
|
||||
*/
|
||||
fooStruct(obj?:NS9459827973991502386.NamespaceA.NamespaceB.StructInNestedNS):NS9459827973991502386.NamespaceA.NamespaceB.StructInNestedNS|null {
|
||||
fooStruct(obj?:NS11563891686210618450.NamespaceA.NamespaceB.StructInNestedNS):NS11563891686210618450.NamespaceA.NamespaceB.StructInNestedNS|null {
|
||||
var offset = this.bb!.__offset(this.bb_pos, 8);
|
||||
return offset ? (obj || new NS9459827973991502386.NamespaceA.NamespaceB.StructInNestedNS).__init(this.bb_pos + offset, this.bb!) : null;
|
||||
return offset ? (obj || new NS11563891686210618450.NamespaceA.NamespaceB.StructInNestedNS).__init(this.bb_pos + offset, this.bb!) : null;
|
||||
};
|
||||
|
||||
/**
|
||||
|
@ -95,8 +95,8 @@ static addFooTable(builder:flatbuffers.Builder, fooTableOffset:flatbuffers.Offse
|
|||
* @param {flatbuffers.Builder} builder
|
||||
* @param {NamespaceA.NamespaceB.EnumInNestedNS} fooEnum
|
||||
*/
|
||||
static addFooEnum(builder:flatbuffers.Builder, fooEnum:NS9459827973991502386.NamespaceA.NamespaceB.EnumInNestedNS) {
|
||||
builder.addFieldInt8(1, fooEnum, NS9459827973991502386.NamespaceA.NamespaceB.EnumInNestedNS.A);
|
||||
static addFooEnum(builder:flatbuffers.Builder, fooEnum:NS11563891686210618450.NamespaceA.NamespaceB.EnumInNestedNS) {
|
||||
builder.addFieldInt8(1, fooEnum, NS11563891686210618450.NamespaceA.NamespaceB.EnumInNestedNS.A);
|
||||
};
|
||||
|
||||
/**
|
||||
|
|