author     Toni Uhlig <matzeton@googlemail.com>    2023-07-16 10:39:26 +0200
committer  Toni Uhlig <matzeton@googlemail.com>    2023-07-16 10:39:26 +0200
commit     0883ee7ce77df4649f8a4fc10bd22164649e2883 (patch)
tree       322e038874217119d4215c4c9308dafedfb06de8
parent     b31e4bc16d1df62b50c6f77a77041f9e7b6c906d (diff)
Added flatcc (C flatbuffers implementation).
* lightweight && faster than protocol buffers as well as its C implementation (protobuf-c)
Signed-off-by: Toni Uhlig <matzeton@googlemail.com>
-rw-r--r--  Makefile                                           55
-rw-r--r--  examples/driver-flatbuffers.bat                    27
-rw-r--r--  examples/driver-flatbuffers.cpp                   374
-rw-r--r--  examples/flatbuffers_common_builder.h             685
-rw-r--r--  examples/flatbuffers_common_reader.h              578
-rw-r--r--  examples/monster.fbs                               32
-rw-r--r--  examples/monster_builder.h                        160
-rw-r--r--  examples/monster_reader.h                         176
-rw-r--r--  examples/monster_verifier.h                       106
-rw-r--r--  flatcc/include/flatcc/portable/paligned_alloc.h     2
-rwxr-xr-x  flatcc/update.sh                                    6
11 files changed, 2193 insertions, 8 deletions
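
For orientation, the builder/reader lifecycle that the new examples/driver-flatbuffers.cpp exercises boils down to the pattern below. This is a minimal user-mode sketch distilled from the driver code further down in this commit; it assumes the generated monster_builder.h (from examples/monster.fbs) and a hosted environment with printf, and is not itself part of the commit.

#include <stdio.h>

#include "monster_builder.h" /* generated from examples/monster.fbs; also pulls in the reader */

#define ns(x) FLATBUFFERS_WRAP_NAMESPACE(MyGame_Sample, x)

int main(void) {
    flatcc_builder_t builder;
    void *buf;
    size_t size;

    flatcc_builder_init(&builder);

    /* Build a minimal Monster and make it the buffer root. */
    ns(Monster_start_as_root(&builder));
    ns(Monster_name_create_str(&builder, "Orc"));
    ns(Monster_hp_add(&builder, 300));
    ns(Monster_end_as_root(&builder));

    /* Snapshot the builder content into a malloc'ed, readable buffer. */
    buf = flatcc_builder_finalize_buffer(&builder, &size);

    /* Read it back through the generated reader API. */
    ns(Monster_table_t) monster = ns(Monster_as_root(buf));
    printf("size=%zu hp=%d\n", size, (int)ns(Monster_hp(monster)));

    flatcc_builder_free(buf);       /* pairs with finalize_buffer */
    flatcc_builder_clear(&builder); /* release the builder itself */
    return 0;
}
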
diff --git a/Makefile b/Makefile
index 357c19a..998f7aa 100644
--- a/Makefile
+++ b/Makefile
@@ -4,6 +4,10 @@ endif
include $(DPP_ROOT)/Makefile.inc
+FLATBUFFERS_LIB = flatbuffers-build/lib/libflatccrt.a
+FLATBUFFERS_CFLAGS = -Iflatbuffers-build/include -Wno-misleading-indentation
+FLATBUFFERS_FLATC = flatbuffers-flatcc-build/bin/flatcc
+
DRIVER0_NAME = driver
DRIVER0_OBJECTS = examples/$(DRIVER0_NAME).o ksocket/ksocket.o ksocket/berkeley.o
DRIVER0_TARGET = $(DRIVER0_NAME).sys
@@ -16,6 +20,10 @@ DRIVER2_NAME = driver-protobuf-c-tcp
DRIVER2_OBJECTS = examples/$(DRIVER2_NAME).o ksocket/ksocket.o ksocket/berkeley.o protobuf-c/protobuf-c.o examples/example.pb-c.o
DRIVER2_TARGET = $(DRIVER2_NAME).sys
+DRIVER3_NAME = driver-flatbuffers
+DRIVER3_OBJECTS = examples/$(DRIVER3_NAME).o ksocket/ksocket.o ksocket/berkeley.o $(FLATBUFFERS_LIB)
+DRIVER3_TARGET = $(DRIVER3_NAME).sys
+
USERSPACE0_NAME = userspace_client
USERSPACE0_OBJECTS = examples/$(USERSPACE0_NAME).o
USERSPACE0_TARGET = $(USERSPACE0_NAME).exe
@@ -26,11 +34,11 @@ USERSPACE1_TARGET = $(USERSPACE1_NAME).exe
# mingw-w64-dpp related
CFLAGS_protobuf-c/protobuf-c.o = -Wno-unused-but-set-variable
-CUSTOM_CFLAGS = -I. -Wl,--exclude-all-symbols -DNDEBUG
+CUSTOM_CFLAGS = -I. -Iexamples $(FLATBUFFERS_CFLAGS) -Wl,--exclude-all-symbols -DNDEBUG
DRIVER_LIBS += -lnetio
USER_LIBS += -lws2_32
-all: $(DRIVER0_TARGET) $(DRIVER1_TARGET) $(DRIVER2_TARGET) $(USERSPACE0_TARGET) $(USERSPACE1_TARGET)
+all: deps $(DRIVER0_TARGET) $(DRIVER1_TARGET) $(DRIVER2_TARGET) $(DRIVER3_TARGET) $(USERSPACE0_TARGET) $(USERSPACE1_TARGET)
%.o: %.cpp
$(call BUILD_CPP_OBJECT,$<,$@)
@@ -47,12 +55,39 @@ $(DRIVER1_TARGET): $(DRIVER1_OBJECTS)
$(DRIVER2_TARGET): $(DRIVER2_OBJECTS)
$(call LINK_CPP_KERNEL_TARGET,$(DRIVER2_OBJECTS),$@)
+$(DRIVER3_TARGET): $(FLATBUFFERS_LIB) $(DRIVER3_OBJECTS)
+ $(call LINK_CPP_KERNEL_TARGET,$(DRIVER3_OBJECTS),$@)
+
$(USERSPACE0_TARGET): $(USERSPACE0_OBJECTS)
$(call LINK_CPP_USER_TARGET,$(USERSPACE0_OBJECTS),$@)
$(USERSPACE1_TARGET): $(USERSPACE1_OBJECTS)
$(call LINK_CPP_USER_TARGET,$(USERSPACE1_OBJECTS),$@)
+deps: $(FLATBUFFERS_LIB) $(FLATBUFFERS_FLATC)
+
+$(FLATBUFFERS_LIB):
+ cmake -S flatcc -B flatbuffers-build \
+ -DCMAKE_INSTALL_PREFIX=/ \
+ -DCMAKE_C_COMPILER="$(realpath $(CC))" \
+ -DCMAKE_SYSTEM_NAME="Windows" \
+ -DCMAKE_C_FLAGS='$(CFLAGS)' \
+ -DCMAKE_BUILD_TYPE=Release \
+ -DFLATCC_ALLOW_WERROR=ON -DFLATCC_COVERAGE=OFF -DFLATCC_CXX_TEST=OFF \
+ -DFLATCC_REFLECTION=OFF -DFLATCC_RTONLY=ON -DFLATCC_TEST=OFF -DFLATCC_INSTALL=ON \
+ -DFLATCC_GNU_POSIX_MEMALIGN=OFF
+ cmake --build flatbuffers-build
+ make -C flatbuffers-build install DESTDIR="$(realpath .)/flatbuffers-build"
+
+$(FLATBUFFERS_FLATC):
+ cmake -S flatcc -B flatbuffers-flatcc-build \
+ -DCMAKE_INSTALL_PREFIX=/ \
+ -DCMAKE_BUILD_TYPE=Release \
+ -DFLATCC_ALLOW_WERROR=ON -DFLATCC_COVERAGE=OFF -DFLATCC_CXX_TEST=OFF \
+ -DFLATCC_REFLECTION=OFF -DFLATCC_RTONLY=OFF -DFLATCC_TEST=OFF -DFLATCC_INSTALL=ON
+ cmake --build flatbuffers-flatcc-build
+ make -C flatbuffers-flatcc-build install DESTDIR="$(realpath .)/flatbuffers-flatcc-build"
+
generate:
@echo '=========================================='
@echo '= You need protobuf-c to make this work! ='
@@ -61,24 +96,30 @@ generate:
@echo
protoc-c --c_out=. examples/example.proto
-install: $(DRIVER0_TARGET) $(DRIVER1_TARGET) $(DRIVER2_TARGET) $(USERSPACE0_TARGET) $(USERSPACE1_TARGET)
+install: $(DRIVER0_TARGET) $(DRIVER1_TARGET) $(DRIVER2_TARGET) $(DRIVER3_TARGET) $(USERSPACE0_TARGET) $(USERSPACE1_TARGET)
$(call INSTALL_EXEC_SIGN,$(DRIVER0_TARGET))
$(call INSTALL_EXEC_SIGN,$(DRIVER1_TARGET))
$(call INSTALL_EXEC_SIGN,$(DRIVER2_TARGET))
+ $(call INSTALL_EXEC_SIGN,$(DRIVER3_TARGET))
$(call INSTALL_EXEC,$(USERSPACE0_TARGET))
$(call INSTALL_EXEC,$(USERSPACE1_TARGET))
$(INSTALL) 'examples/$(DRIVER0_NAME).bat' '$(DESTDIR)/'
$(INSTALL) 'examples/$(DRIVER1_NAME).bat' '$(DESTDIR)/'
$(INSTALL) 'examples/$(DRIVER2_NAME).bat' '$(DESTDIR)/'
+ $(INSTALL) 'examples/$(DRIVER3_NAME).bat' '$(DESTDIR)/'
clean:
- rm -f $(DRIVER0_OBJECTS) $(DRIVER1_OBJECTS) $(DRIVER2_OBJECTS)
+ rm -f $(DRIVER0_OBJECTS) $(DRIVER1_OBJECTS) $(DRIVER2_OBJECTS) $(DRIVER3_OBJECTS)
rm -f $(DRIVER0_TARGET) $(DRIVER0_TARGET).map \
$(DRIVER1_TARGET) $(DRIVER1_TARGET).map \
- $(DRIVER2_TARGET) $(DRIVER2_TARGET).map
+ $(DRIVER2_TARGET) $(DRIVER2_TARGET).map \
+ $(DRIVER3_TARGET) $(DRIVER3_TARGET).map
rm -f $(USERSPACE0_OBJECTS) $(USERSPACE1_OBJECTS)
rm -f $(USERSPACE0_TARGET) $(USERSPACE1_TARGET)
-.NOTPARALLEL: clean
-.PHONY: all install clean
+distclean: clean
+ rm -rf flatbuffers-build flatbuffers-flatcc-build
+
+.NOTPARALLEL: clean distclean
+.PHONY: all install clean distclean
.DEFAULT_GOAL := all
diff --git a/examples/driver-flatbuffers.bat b/examples/driver-flatbuffers.bat
new file mode 100644
index 0000000..c9eaea8
--- /dev/null
+++ b/examples/driver-flatbuffers.bat
@@ -0,0 +1,27 @@
+@echo off
+set SERVICE_NAME=flatbuffers
+set DRIVER="%~dp0\driver-flatbuffers.sys"
+
+net session >nul 2>&1
+if NOT %ERRORLEVEL% EQU 0 (
+ echo ERROR: This script requires Administrator privileges!
+ pause
+ exit /b 1
+)
+
+echo ---------------------------------------
+echo -- Service Name: %SERVICE_NAME%
+echo -- Driver......: %DRIVER%
+echo ---------------------------------------
+
+sc create %SERVICE_NAME% binPath= %DRIVER% type= kernel
+echo ---------------------------------------
+sc start %SERVICE_NAME%
+echo ---------------------------------------
+sc query %SERVICE_NAME%
+echo [PRESS A KEY TO STOP THE DRIVER]
+pause
+sc stop %SERVICE_NAME%
+sc delete %SERVICE_NAME%
+echo Done.
+timeout /t 3
diff --git a/examples/driver-flatbuffers.cpp b/examples/driver-flatbuffers.cpp
new file mode 100644
index 0000000..758cbf3
--- /dev/null
+++ b/examples/driver-flatbuffers.cpp
@@ -0,0 +1,374 @@
+extern "C" {
+#include <ksocket/berkeley.h>
+#include <ksocket/ksocket.h>
+#include <ksocket/wsk.h>
+
+#include "monster_builder.h"
+
+DRIVER_INITIALIZE DriverEntry;
+DRIVER_UNLOAD DriverUnload;
+
+// Convenient namespace macro to manage long namespace prefix.
+// The ns macro makes it possible to write `ns(Monster_create(...))`
+// instead of `MyGame_Sample_Monster_create(...)`
+#undef ns
+#define ns(x) \
+ FLATBUFFERS_WRAP_NAMESPACE(MyGame_Sample, x) // Specified in the schema.
+
+// A helper to simplify creating vectors from C-arrays.
+#define c_vec_len(V) (sizeof(V) / sizeof((V)[0]))
+
+// This allows us to verify result in optimized builds.
+#define test_assert(x) \
+ do { \
+ if (!(x)) { \
+ DebuggerPrint("%s\n", "Assert Failed: " #x); \
+ return -1; \
+ } \
+ } while (0)
+
+#define DebuggerPrint(...) \
+ DbgPrintEx(DPFLTR_IHVDRIVER_ID, DPFLTR_ERROR_LEVEL, __VA_ARGS__);
+
+// Bottom-up approach where we create child objects and store these
+// in temporary references before a parent object is created with
+// these references.
+int create_monster_bottom_up(flatcc_builder_t *B, int flexible) {
+ flatbuffers_string_ref_t weapon_one_name =
+ flatbuffers_string_create_str(B, "Sword");
+ int16_t weapon_one_damage = 3;
+
+ flatbuffers_string_ref_t weapon_two_name =
+ flatbuffers_string_create_str(B, "Axe");
+ int16_t weapon_two_damage = 5;
+
+ // Use the `MyGame_Sample_Weapon_create` shortcut to create Weapons
+ // with all the fields set.
+ //
+ // In the C-API, verbs (here create) always follow the type name
+ // (here Weapon), prefixed by the namespace (here MyGame_Sample_):
+ // MyGame_Sample_Weapon_create(...), or ns(Weapon_create(...)).
+ ns(Weapon_ref_t) sword =
+ ns(Weapon_create(B, weapon_one_name, weapon_one_damage));
+ ns(Weapon_ref_t) axe =
+ ns(Weapon_create(B, weapon_two_name, weapon_two_damage));
+
+ // Serialize a name for our monster, called "Orc".
+ // The _str suffix indicates the source is an ascii-z string.
+ flatbuffers_string_ref_t name = flatbuffers_string_create_str(B, "Orc");
+
+ // Create a `vector` representing the inventory of the Orc. Each number
+ // could correspond to an item that can be claimed after he is slain.
+ uint8_t treasure[] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
+ flatbuffers_uint8_vec_ref_t inventory;
+ // `c_vec_len` is the convenience macro we defined earlier.
+ inventory = flatbuffers_uint8_vec_create(B, treasure, c_vec_len(treasure));
+
+ // Here we use a top-down approach locally to build a Weapons vector
+ // in-place instead of creating a temporary external vector to use
+ // as argument like we did with the `inventory` earlier on, but the
+ // overall approach is still bottom-up.
+ ns(Weapon_vec_start(B));
+ ns(Weapon_vec_push(B, sword));
+ ns(Weapon_vec_push(B, axe));
+ ns(Weapon_vec_ref_t) weapons = ns(Weapon_vec_end(B));
+
+ // Create a `Vec3`, representing the Orc's position in 3-D space.
+ ns(Vec3_t) pos = {1.0f, 2.0f, 3.0f};
+
+ // Set his hit points to 300 and his mana to 150.
+ int16_t hp = 300;
+ // The default value is 150, so we will never store this field.
+ int16_t mana = 150;
+
+ // Create the equipment union. In the C++ language API this is given
+ // as two arguments to the create call, or as two separate add
+ // operations for the type and the table reference. Here we create
+ // a single union value that carries both the type and reference.
+ ns(Equipment_union_ref_t) equipped = ns(Equipment_as_Weapon(axe));
+
+ if (!flexible) {
+ // Finally, create the monster using the `Monster_create` helper function
+ // to set all fields.
+ //
+ // Note that the Equipment union only takes up one argument in C, where
+ // C++ takes a type and an object argument.
+ ns(Monster_create_as_root(B, &pos, mana, hp, name, inventory, ns(Color_Red),
+ weapons, equipped));
+
+ // Unlike C++ we do not use a Finish call. Instead we use the
+ // `create_as_root` action which has better type safety and
+ // simplicity.
+ //
+ // However, we can also express this as:
+ //
+ // ns(Monster_ref_t) orc = ns(Monster_create(B, ...));
+ // flatcc_builder_buffer_create(orc);
+ //
+ // In this approach the function should return the orc and
+ // let a calling function handle the flatcc_builder_buffer_create call
+ // for a more composable setup that is also able to create child
+ // monsters. In general, `flatcc_builder` calls are best isolated
+ // in a containing driver function.
+
+ } else {
+
+ // A more flexible approach where we mix bottom-up and top-down
+ // style. We still create child objects first, but then create
+ // a top-down style monster object that we can manipulate in more
+ // detail.
+
+ // It is important to pair `start_as_root` with `end_as_root`.
+ ns(Monster_start_as_root(B));
+ ns(Monster_pos_create(B, 1.0f, 2.0f, 3.0f));
+ // or alternatively
+ // ns(Monster_pos_add(B, &pos));
+
+ ns(Monster_hp_add(B, hp));
+ // Notice that `Monster_name_add` adds a string reference unlike the
+ // add_str and add_strn variants.
+ ns(Monster_name_add(B, name));
+ ns(Monster_inventory_add(B, inventory));
+ ns(Monster_color_add(B, ns(Color_Red)));
+ ns(Monster_weapons_add(B, weapons));
+ ns(Monster_equipped_add(B, equipped));
+ // Complete the monster object and make it the buffer root object.
+ ns(Monster_end_as_root(B));
+
+ // We could also drop the `as_root` suffix from Monster_start/end(B)
+ // and add the table as buffer root later:
+ //
+ // ns(Monster_start(B));
+ // ...
+ // ns(Monster_ref_t) orc = ns(Monster_end(B));
+ // flatcc_builder_buffer_create(orc);
+ //
+ // It is best to keep the `flatcc_builder` calls in a containing
+ // driver function for modularity.
+ }
+ return 0;
+}
+
+// Alternative top-down approach where parent objects are created before
+// their children. We only need to save one reference because the `axe`
+// object is used in two places effectively making the buffer object
+// graph a DAG.
+int create_monster_top_down(flatcc_builder_t *B) {
+ uint8_t treasure[] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
+ size_t treasure_count = c_vec_len(treasure);
+ ns(Weapon_ref_t) axe;
+
+ // NOTE: if we use end_as_root, we MUST also start as root.
+ ns(Monster_start_as_root(B));
+ ns(Monster_pos_create(B, 1.0f, 2.0f, 3.0f));
+ ns(Monster_hp_add(B, 300));
+ // ns(Monster_mana_add(B, 150));
+ // We use create_str instead of add because we have no existing string
+ // reference.
+ ns(Monster_name_create_str(B, "Orc"));
+ // Again we use create because we have no existing vector object, only a C-array.
+ ns(Monster_inventory_create(B, treasure, treasure_count));
+ ns(Monster_color_add(B, ns(Color_Red)));
+ if (1) {
+ ns(Monster_weapons_start(B));
+ ns(Monster_weapons_push_create(B, flatbuffers_string_create_str(B, "Sword"),
+ 3));
+ // We reuse the axe object later. Note that we dereference a pointer
+ // because push always returns a short-term pointer to the stored element.
+ // We could also have created the axe object first and simply pushed it.
+ axe = *ns(Monster_weapons_push_create(
+ B, flatbuffers_string_create_str(B, "Axe"), 5));
+ ns(Monster_weapons_end(B));
+ } else {
+ // We can have more control with the table elements added to a vector:
+ //
+ ns(Monster_weapons_start(B));
+ ns(Monster_weapons_push_start(B));
+ ns(Weapon_name_create_str(B, "Sword"));
+ ns(Weapon_damage_add(B, 3));
+ ns(Monster_weapons_push_end(B));
+ ns(Monster_weapons_push_start(B));
+ ns(Weapon_name_create_str(B, "Axe"));
+ ns(Weapon_damage_add(B, 5));
+ axe = *ns(Monster_weapons_push_end(B));
+ ns(Monster_weapons_end(B));
+ }
+ // Unions can get their type by using a type-specific add/create/start method.
+ ns(Monster_equipped_Weapon_add(B, axe));
+
+ ns(Monster_end_as_root(B));
+ return 0;
+}
+
+// This isn't strictly needed because the builder already included the reader,
+// but we would need it if our reader were in a separate file.
+#include "monster_reader.h"
+
+#undef ns
+#define ns(x) \
+ FLATBUFFERS_WRAP_NAMESPACE(MyGame_Sample, x) // Specified in the schema.
+
+int access_monster_buffer(const void *buffer) {
+ // Note that we use the `table_t` suffix when reading a table object
+ // as opposed to the `ref_t` suffix used during the construction of
+ // the buffer.
+ ns(Monster_table_t) monster = ns(Monster_as_root(buffer));
+
+ // Note: root object pointers are NOT the same as the `buffer` pointer.
+
+ // Make sure the buffer is accessible.
+ test_assert(monster != 0);
+
+ int16_t hp = ns(Monster_hp(monster));
+ int16_t mana = ns(Monster_mana(monster));
+ // This is just a const char *, but it also supports a fast length operation.
+ flatbuffers_string_t name = ns(Monster_name(monster));
+ size_t name_len = flatbuffers_string_len(name);
+
+ test_assert(hp == 300);
+ // Since 150 is the default, we are reading a value that wasn't stored.
+ test_assert(mana == 150);
+ test_assert(0 == strcmp(name, "Orc"));
+ test_assert(name_len == strlen("Orc"));
+
+ int hp_present = ns(Monster_hp_is_present(monster)); // 1
+ int mana_present = ns(Monster_mana_is_present(monster)); // 0
+ test_assert(hp_present);
+ test_assert(!mana_present);
+
+ ns(Vec3_struct_t) pos = ns(Monster_pos(monster));
+ // Make sure pos has been set.
+ test_assert(pos != 0);
+ float x = ns(Vec3_x(pos));
+ float y = ns(Vec3_y(pos));
+ float z = ns(Vec3_z(pos));
+
+ // The literal `f` suffix is important because double literals do
+ // not always map cleanly to a 32-bit representation even with only a few digits:
+ // `1.0 == 1.0f`, but `3.2 != 3.2f`.
+ test_assert(x == 1.0f);
+ test_assert(y == 2.0f);
+ test_assert(z == 3.0f);
+
+ // We can also read the position into a C-struct. We have to copy
+ // because we generally do not know if the native endian format
+ // matches the one stored in the buffer (pe: protocol endian).
+ ns(Vec3_t) pos_vec;
+ // `pe` indicates endian conversion from protocol to native.
+ ns(Vec3_copy_from_pe(&pos_vec, pos));
+ test_assert(pos_vec.x == 1.0f);
+ test_assert(pos_vec.y == 2.0f);
+ test_assert(pos_vec.z == 3.0f);
+
+ // This is a const uint8_t *, but it shouldn't be accessed directly
+ // to ensure proper endian conversion. However, uint8 (ubyte) is
+ // not sensitive to endianness, so we *could* have accessed it directly.
+ // The compiler likely optimizes this so that it doesn't matter.
+ flatbuffers_uint8_vec_t inv = ns(Monster_inventory(monster));
+ size_t inv_len = flatbuffers_uint8_vec_len(inv);
+ // Make sure the inventory has been set.
+ test_assert(inv != 0);
+ // If `inv` were absent, the length would be 0, so the above test is redundant.
+ test_assert(inv_len == 10);
+ // Index 0 is the first, index 2 is the third.
+ // NOTE: C++ uses the `Get` terminology for vector elements, C uses `at`.
+ uint8_t third_item = flatbuffers_uint8_vec_at(inv, 2);
+ test_assert(third_item == 2);
+
+ ns(Weapon_vec_t) weapons = ns(Monster_weapons(monster));
+ size_t weapons_len = ns(Weapon_vec_len(weapons));
+ test_assert(weapons_len == 2);
+ // We can use `const char *` instead of `flatbuffers_string_t`.
+ const char *second_weapon_name =
+ ns(Weapon_name(ns(Weapon_vec_at(weapons, 1))));
+ int16_t second_weapon_damage =
+ ns(Weapon_damage(ns(Weapon_vec_at(weapons, 1))));
+ test_assert(second_weapon_name != 0 &&
+ strcmp(second_weapon_name, "Axe") == 0);
+ test_assert(second_weapon_damage == 5);
+
+ // Access union type field.
+ if (ns(Monster_equipped_type(monster)) == ns(Equipment_Weapon)) {
+ // Cast to appropriate type:
+ // C does not require the cast to Weapon_table_t, but C++ does.
+ ns(Weapon_table_t) weapon =
+ (ns(Weapon_table_t))ns(Monster_equipped(monster));
+ const char *weapon_name = ns(Weapon_name(weapon));
+ int16_t weapon_damage = ns(Weapon_damage(weapon));
+
+ test_assert(0 == strcmp(weapon_name, "Axe"));
+ test_assert(weapon_damage == 5);
+ }
+ return 0;
+}
+
+NTSTATUS
+NTAPI
+DriverEntry(_In_ PDRIVER_OBJECT DriverObject,
+ _In_ PUNICODE_STRING RegistryPath) {
+ UNREFERENCED_PARAMETER(DriverObject);
+ UNREFERENCED_PARAMETER(RegistryPath);
+
+ DebuggerPrint("Hello.");
+
+ // Create a `FlatBufferBuilder`, which will be used to create our
+ // monsters' FlatBuffers.
+ flatcc_builder_t builder;
+ void *buf;
+ size_t size;
+
+ // Initialize the builder object.
+ flatcc_builder_init(&builder);
+ test_assert(0 == create_monster_bottom_up(&builder, 0));
+
+ // Allocate and extract a readable buffer from internal builder heap.
+ // NOTE: Finalizing the buffer does NOT change the builder, it
+ // just creates a snapshot of the builder content.
+ // NOTE2: finalize_buffer uses malloc while finalize_aligned_buffer
+ // uses a portable aligned allocation method. Often the malloc
+ // version is sufficient, but won't work for all schema on all
+ // systems. If the buffer is written to disk or network, but not
+ // accessed in memory, `finalize_buffer` is also sufficient.
+ // The flatcc_builder version of free or aligned_free should be used
+ // instead of `free` although free will often work on POSIX systems.
+ // This ensures portability and prevents issues when linking to
+ // allocation libraries other than malloc.
+ buf = flatcc_builder_finalize_aligned_buffer(&builder, &size);
+ // buf = flatcc_builder_finalize_buffer(&builder, &size);
+
+ // We now have a FlatBuffer we can store on disk or send over a network.
+ // ** file/network code goes here :) **
+ // Instead, we're going to access it right away (as if we just received it).
+ // access_monster_buffer(buf);
+
+ // prior to v0.5.0, use `aligned_free`
+ flatcc_builder_aligned_free(buf);
+ // free(buf);
+ //
+ // The builder object can optionally be reused after a reset which
+ // is faster than creating a new builder. Subsequent use might
+ // entirely avoid temporary allocations until finalizing the buffer.
+ flatcc_builder_reset(&builder);
+ test_assert(0 == create_monster_bottom_up(&builder, 1));
+ buf = flatcc_builder_finalize_aligned_buffer(&builder, &size);
+ access_monster_buffer(buf);
+ flatcc_builder_aligned_free(buf);
+ flatcc_builder_reset(&builder);
+ create_monster_top_down(&builder);
+ buf = flatcc_builder_finalize_buffer(&builder, &size);
+ test_assert(0 == access_monster_buffer(buf));
+ flatcc_builder_free(buf);
+ // Eventually the builder must be cleaned up:
+ flatcc_builder_clear(&builder);
+
+ DebuggerPrint("The FlatBuffer was successfully created and accessed!\n");
+
+ return STATUS_SUCCESS;
+}
+
+VOID DriverUnload(_In_ struct _DRIVER_OBJECT *DriverObject) {
+ UNREFERENCED_PARAMETER(DriverObject);
+
+ DebuggerPrint("Bye.");
+}
+}
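
Note that the commit also adds examples/monster_verifier.h, but the driver never calls it. If a Monster buffer were received over the network rather than built locally, it should be verified before access. A minimal sketch, assuming the verifier generated from monster.fbs follows flatcc's usual naming (a Monster_verify_as_root function returning flatcc_verify_ok on success); this is an illustration, not part of the commit:

#include "monster_reader.h"
#include "monster_verifier.h"

#define ns(x) FLATBUFFERS_WRAP_NAMESPACE(MyGame_Sample, x)

/* Returns 0 only if the untrusted buffer is a structurally valid Monster
 * with the expected hit points. */
static int check_and_read(const void *buf, size_t size) {
    int ret = ns(Monster_verify_as_root(buf, size));
    if (ret != flatcc_verify_ok) {
        /* flatcc_verify_error_string(ret) yields a readable reason. */
        return -1;
    }
    ns(Monster_table_t) monster = ns(Monster_as_root(buf));
    return ns(Monster_hp(monster)) == 300 ? 0 : -1;
}
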
diff --git a/examples/flatbuffers_common_builder.h b/examples/flatbuffers_common_builder.h
new file mode 100644
index 0000000..a918e55
--- /dev/null
+++ b/examples/flatbuffers_common_builder.h
@@ -0,0 +1,685 @@
+#ifndef FLATBUFFERS_COMMON_BUILDER_H
+#define FLATBUFFERS_COMMON_BUILDER_H
+
+/* Generated by flatcc 0.6.2 FlatBuffers schema compiler for C by dvide.com */
+
+/* Common FlatBuffers build functionality for C. */
+
+#include "flatcc/flatcc_prologue.h"
+#ifndef FLATBUILDER_H
+#include "flatcc/flatcc_builder.h"
+#endif
+typedef flatcc_builder_t flatbuffers_builder_t;
+typedef flatcc_builder_ref_t flatbuffers_ref_t;
+typedef flatcc_builder_ref_t flatbuffers_vec_ref_t;
+typedef flatcc_builder_union_ref_t flatbuffers_union_ref_t;
+typedef flatcc_builder_union_vec_ref_t flatbuffers_union_vec_ref_t;
+/* integer return code (ref and ptr always fail on 0) */
+#define flatbuffers_failed(x) ((x) < 0)
+typedef flatbuffers_ref_t flatbuffers_root_t;
+#define flatbuffers_root(ref) ((flatbuffers_root_t)(ref))
+
+#define __flatbuffers_memoize_begin(B, src)\
+do { flatcc_builder_ref_t _ref; if ((_ref = flatcc_builder_refmap_find((B), (src)))) return _ref; } while (0)
+#define __flatbuffers_memoize_end(B, src, op) do { return flatcc_builder_refmap_insert((B), (src), (op)); } while (0)
+#define __flatbuffers_memoize(B, src, op) do { __flatbuffers_memoize_begin(B, src); __flatbuffers_memoize_end(B, src, op); } while (0)
+
+#define __flatbuffers_build_buffer(NS)\
+typedef NS ## ref_t NS ## buffer_ref_t;\
+static inline int NS ## buffer_start(NS ## builder_t *B, const NS ##fid_t fid)\
+{ return flatcc_builder_start_buffer(B, fid, 0, 0); }\
+static inline int NS ## buffer_start_with_size(NS ## builder_t *B, const NS ##fid_t fid)\
+{ return flatcc_builder_start_buffer(B, fid, 0, flatcc_builder_with_size); }\
+static inline int NS ## buffer_start_aligned(NS ## builder_t *B, NS ##fid_t fid, uint16_t block_align)\
+{ return flatcc_builder_start_buffer(B, fid, block_align, 0); }\
+static inline int NS ## buffer_start_aligned_with_size(NS ## builder_t *B, NS ##fid_t fid, uint16_t block_align)\
+{ return flatcc_builder_start_buffer(B, fid, block_align, flatcc_builder_with_size); }\
+static inline NS ## buffer_ref_t NS ## buffer_end(NS ## builder_t *B, NS ## ref_t root)\
+{ return flatcc_builder_end_buffer(B, root); }
+
+#define __flatbuffers_build_table_root(NS, N, FID, TFID)\
+static inline int N ## _start_as_root(NS ## builder_t *B)\
+{ return NS ## buffer_start(B, FID) ? -1 : N ## _start(B); }\
+static inline int N ## _start_as_root_with_size(NS ## builder_t *B)\
+{ return NS ## buffer_start_with_size(B, FID) ? -1 : N ## _start(B); }\
+static inline int N ## _start_as_typed_root(NS ## builder_t *B)\
+{ return NS ## buffer_start(B, TFID) ? -1 : N ## _start(B); }\
+static inline int N ## _start_as_typed_root_with_size(NS ## builder_t *B)\
+{ return NS ## buffer_start_with_size(B, TFID) ? -1 : N ## _start(B); }\
+static inline NS ## buffer_ref_t N ## _end_as_root(NS ## builder_t *B)\
+{ return NS ## buffer_end(B, N ## _end(B)); }\
+static inline NS ## buffer_ref_t N ## _end_as_typed_root(NS ## builder_t *B)\
+{ return NS ## buffer_end(B, N ## _end(B)); }\
+static inline NS ## buffer_ref_t N ## _create_as_root(NS ## builder_t *B __ ## N ## _formal_args)\
+{ if (NS ## buffer_start(B, FID)) return 0; return NS ## buffer_end(B, N ## _create(B __ ## N ## _call_args)); }\
+static inline NS ## buffer_ref_t N ## _create_as_root_with_size(NS ## builder_t *B __ ## N ## _formal_args)\
+{ if (NS ## buffer_start_with_size(B, FID)) return 0; return NS ## buffer_end(B, N ## _create(B __ ## N ## _call_args)); }\
+static inline NS ## buffer_ref_t N ## _create_as_typed_root(NS ## builder_t *B __ ## N ## _formal_args)\
+{ if (NS ## buffer_start(B, TFID)) return 0; return NS ## buffer_end(B, N ## _create(B __ ## N ## _call_args)); }\
+static inline NS ## buffer_ref_t N ## _create_as_typed_root_with_size(NS ## builder_t *B __ ## N ## _formal_args)\
+{ if (NS ## buffer_start_with_size(B, TFID)) return 0; return NS ## buffer_end(B, N ## _create(B __ ## N ## _call_args)); }\
+static inline NS ## buffer_ref_t N ## _clone_as_root(NS ## builder_t *B, N ## _table_t t)\
+{ if (NS ## buffer_start(B, FID)) return 0; return NS ## buffer_end(B, N ## _clone(B, t)); }\
+static inline NS ## buffer_ref_t N ## _clone_as_root_with_size(NS ## builder_t *B, N ## _table_t t)\
+{ if (NS ## buffer_start_with_size(B, FID)) return 0; return NS ## buffer_end(B, N ## _clone(B, t)); }\
+static inline NS ## buffer_ref_t N ## _clone_as_typed_root(NS ## builder_t *B, N ## _table_t t)\
+{ if (NS ## buffer_start(B, TFID)) return 0;return NS ## buffer_end(B, N ## _clone(B, t)); }\
+static inline NS ## buffer_ref_t N ## _clone_as_typed_root_with_size(NS ## builder_t *B, N ## _table_t t)\
+{ if (NS ## buffer_start_with_size(B, TFID)) return 0; return NS ## buffer_end(B, N ## _clone(B, t)); }
+
+#define __flatbuffers_build_table_prolog(NS, N, FID, TFID)\
+__flatbuffers_build_table_vector_ops(NS, N ## _vec, N)\
+__flatbuffers_build_table_root(NS, N, FID, TFID)
+
+#define __flatbuffers_build_struct_root(NS, N, A, FID, TFID)\
+static inline N ## _t *N ## _start_as_root(NS ## builder_t *B)\
+{ return NS ## buffer_start(B, FID) ? 0 : N ## _start(B); }\
+static inline N ## _t *N ## _start_as_root_with_size(NS ## builder_t *B)\
+{ return NS ## buffer_start_with_size(B, FID) ? 0 : N ## _start(B); }\
+static inline N ## _t *N ## _start_as_typed_root(NS ## builder_t *B)\
+{ return NS ## buffer_start(B, TFID) ? 0 : N ## _start(B); }\
+static inline N ## _t *N ## _start_as_typed_root_with_size(NS ## builder_t *B)\
+{ return NS ## buffer_start_with_size(B, TFID) ? 0 : N ## _start(B); }\
+static inline NS ## buffer_ref_t N ## _end_as_root(NS ## builder_t *B)\
+{ return NS ## buffer_end(B, N ## _end(B)); }\
+static inline NS ## buffer_ref_t N ## _end_as_typed_root(NS ## builder_t *B)\
+{ return NS ## buffer_end(B, N ## _end(B)); }\
+static inline NS ## buffer_ref_t N ## _end_pe_as_root(NS ## builder_t *B)\
+{ return NS ## buffer_end(B, N ## _end_pe(B)); }\
+static inline NS ## buffer_ref_t N ## _end_pe_as_typed_root(NS ## builder_t *B)\
+{ return NS ## buffer_end(B, N ## _end_pe(B)); }\
+static inline NS ## buffer_ref_t N ## _create_as_root(NS ## builder_t *B __ ## N ## _formal_args)\
+{ return flatcc_builder_create_buffer(B, FID, 0,\
+ N ## _create(B __ ## N ## _call_args), A, 0); }\
+static inline NS ## buffer_ref_t N ## _create_as_root_with_size(NS ## builder_t *B __ ## N ## _formal_args)\
+{ return flatcc_builder_create_buffer(B, FID, 0,\
+ N ## _create(B __ ## N ## _call_args), A, flatcc_builder_with_size); }\
+static inline NS ## buffer_ref_t N ## _create_as_typed_root(NS ## builder_t *B __ ## N ## _formal_args)\
+{ return flatcc_builder_create_buffer(B, TFID, 0,\
+ N ## _create(B __ ## N ## _call_args), A, 0); }\
+static inline NS ## buffer_ref_t N ## _create_as_typed_root_with_size(NS ## builder_t *B __ ## N ## _formal_args)\
+{ return flatcc_builder_create_buffer(B, TFID, 0,\
+ N ## _create(B __ ## N ## _call_args), A, flatcc_builder_with_size); }\
+static inline NS ## buffer_ref_t N ## _clone_as_root(NS ## builder_t *B, N ## _struct_t p)\
+{ return flatcc_builder_create_buffer(B, FID, 0, N ## _clone(B, p), A, 0); }\
+static inline NS ## buffer_ref_t N ## _clone_as_root_with_size(NS ## builder_t *B, N ## _struct_t p)\
+{ return flatcc_builder_create_buffer(B, FID, 0, N ## _clone(B, p), A, flatcc_builder_with_size); }\
+static inline NS ## buffer_ref_t N ## _clone_as_typed_root(NS ## builder_t *B, N ## _struct_t p)\
+{ return flatcc_builder_create_buffer(B, TFID, 0, N ## _clone(B, p), A, 0); }\
+static inline NS ## buffer_ref_t N ## _clone_as_typed_root_with_size(NS ## builder_t *B, N ## _struct_t p)\
+{ return flatcc_builder_create_buffer(B, TFID, 0, N ## _clone(B, p), A, flatcc_builder_with_size); }
+
+#define __flatbuffers_build_nested_table_root(NS, N, TN, FID, TFID)\
+static inline int N ## _start_as_root(NS ## builder_t *B)\
+{ return NS ## buffer_start(B, FID) ? -1 : TN ## _start(B); }\
+static inline int N ## _start_as_typed_root(NS ## builder_t *B)\
+{ return NS ## buffer_start(B, TFID) ? -1 : TN ## _start(B); }\
+static inline int N ## _end_as_root(NS ## builder_t *B)\
+{ return N ## _add(B, NS ## buffer_end(B, TN ## _end(B))); }\
+static inline int N ## _end_as_typed_root(NS ## builder_t *B)\
+{ return N ## _add(B, NS ## buffer_end(B, TN ## _end(B))); }\
+static inline int N ## _nest(NS ## builder_t *B, void *data, size_t size, uint16_t align)\
+{ return N ## _add(B, flatcc_builder_create_vector(B, data, size, 1,\
+ align ? align : 8, FLATBUFFERS_COUNT_MAX(1))); }\
+static inline int N ## _typed_nest(NS ## builder_t *B, void *data, size_t size, uint16_t align)\
+{ return N ## _add(B, flatcc_builder_create_vector(B, data, size, 1,\
+ align ? align : 8, FLATBUFFERS_COUNT_MAX(1))); }\
+static inline int N ## _clone_as_root(NS ## builder_t *B, TN ## _table_t t)\
+{ return N ## _add(B, TN ## _clone_as_root(B, t)); }\
+static inline int N ## _clone_as_typed_root(NS ## builder_t *B, TN ## _table_t t)\
+{ return N ## _add(B, TN ## _clone_as_typed_root(B, t)); }
+
+#define __flatbuffers_build_nested_struct_root(NS, N, TN, A, FID, TFID)\
+static inline TN ## _t *N ## _start_as_root(NS ## builder_t *B)\
+{ return NS ## buffer_start(B, FID) ? 0 : TN ## _start(B); }\
+static inline TN ## _t *N ## _start_as_typed_root(NS ## builder_t *B)\
+{ return NS ## buffer_start(B, FID) ? 0 : TN ## _start(B); }\
+static inline int N ## _end_as_root(NS ## builder_t *B)\
+{ return N ## _add(B, NS ## buffer_end(B, TN ## _end(B))); }\
+static inline int N ## _end_as_typed_root(NS ## builder_t *B)\
+{ return N ## _add(B, NS ## buffer_end(B, TN ## _end(B))); }\
+static inline int N ## _end_pe_as_root(NS ## builder_t *B)\
+{ return N ## _add(B, NS ## buffer_end(B, TN ## _end_pe(B))); }\
+static inline int N ## _create_as_root(NS ## builder_t *B __ ## TN ## _formal_args)\
+{ return N ## _add(B, flatcc_builder_create_buffer(B, FID, 0,\
+ TN ## _create(B __ ## TN ## _call_args), A, flatcc_builder_is_nested)); }\
+static inline int N ## _create_as_typed_root(NS ## builder_t *B __ ## TN ## _formal_args)\
+{ return N ## _add(B, flatcc_builder_create_buffer(B, TFID, 0,\
+ TN ## _create(B __ ## TN ## _call_args), A, flatcc_builder_is_nested)); }\
+static inline int N ## _nest(NS ## builder_t *B, void *data, size_t size, uint16_t align)\
+{ return N ## _add(B, flatcc_builder_create_vector(B, data, size, 1,\
+ align < A ? A : align, FLATBUFFERS_COUNT_MAX(1))); }\
+static inline int N ## _typed_nest(NS ## builder_t *B, void *data, size_t size, uint16_t align)\
+{ return N ## _add(B, flatcc_builder_create_vector(B, data, size, 1,\
+ align < A ? A : align, FLATBUFFERS_COUNT_MAX(1))); }\
+static inline int N ## _clone_as_root(NS ## builder_t *B, TN ## _struct_t p)\
+{ return N ## _add(B, TN ## _clone_as_root(B, p)); }\
+static inline int N ## _clone_as_typed_root(NS ## builder_t *B, TN ## _struct_t p)\
+{ return N ## _add(B, TN ## _clone_as_typed_root(B, p)); }
+
+#define __flatbuffers_build_vector_ops(NS, V, N, TN, T)\
+static inline T *V ## _extend(NS ## builder_t *B, size_t len)\
+{ return (T *)flatcc_builder_extend_vector(B, len); }\
+static inline T *V ## _append(NS ## builder_t *B, const T *data, size_t len)\
+{ return (T *)flatcc_builder_append_vector(B, data, len); }\
+static inline int V ## _truncate(NS ## builder_t *B, size_t len)\
+{ return flatcc_builder_truncate_vector(B, len); }\
+static inline T *V ## _edit(NS ## builder_t *B)\
+{ return (T *)flatcc_builder_vector_edit(B); }\
+static inline size_t V ## _reserved_len(NS ## builder_t *B)\
+{ return flatcc_builder_vector_count(B); }\
+static inline T *V ## _push(NS ## builder_t *B, const T *p)\
+{ T *_p; return (_p = (T *)flatcc_builder_extend_vector(B, 1)) ? (memcpy(_p, p, TN ## __size()), _p) : 0; }\
+static inline T *V ## _push_copy(NS ## builder_t *B, const T *p)\
+{ T *_p; return (_p = (T *)flatcc_builder_extend_vector(B, 1)) ? TN ## _copy(_p, p) : 0; }\
+static inline T *V ## _push_clone(NS ## builder_t *B, const T *p)\
+{ T *_p; return (_p = (T *)flatcc_builder_extend_vector(B, 1)) ? TN ## _copy(_p, p) : 0; }\
+static inline T *V ## _push_create(NS ## builder_t *B __ ## TN ## _formal_args)\
+{ T *_p; return (_p = (T *)flatcc_builder_extend_vector(B, 1)) ? TN ## _assign(_p __ ## TN ## _call_args) : 0; }
+
+#define __flatbuffers_build_vector(NS, N, T, S, A)\
+typedef NS ## ref_t N ## _vec_ref_t;\
+static inline int N ## _vec_start(NS ## builder_t *B)\
+{ return flatcc_builder_start_vector(B, S, A, FLATBUFFERS_COUNT_MAX(S)); }\
+static inline N ## _vec_ref_t N ## _vec_end_pe(NS ## builder_t *B)\
+{ return flatcc_builder_end_vector(B); }\
+static inline N ## _vec_ref_t N ## _vec_end(NS ## builder_t *B)\
+{ if (!NS ## is_native_pe()) { size_t i, n; T *p = (T *)flatcc_builder_vector_edit(B);\
+ for (i = 0, n = flatcc_builder_vector_count(B); i < n; ++i)\
+ { N ## _to_pe(N ## __ptr_add(p, i)); }} return flatcc_builder_end_vector(B); }\
+static inline N ## _vec_ref_t N ## _vec_create_pe(NS ## builder_t *B, const T *data, size_t len)\
+{ return flatcc_builder_create_vector(B, data, len, S, A, FLATBUFFERS_COUNT_MAX(S)); }\
+static inline N ## _vec_ref_t N ## _vec_create(NS ## builder_t *B, const T *data, size_t len)\
+{ if (!NS ## is_native_pe()) { size_t i; T *p; int ret = flatcc_builder_start_vector(B, S, A, FLATBUFFERS_COUNT_MAX(S)); if (ret) { return ret; }\
+ p = (T *)flatcc_builder_extend_vector(B, len); if (!p) return 0;\
+ for (i = 0; i < len; ++i) { N ## _copy_to_pe(N ## __ptr_add(p, i), N ## __const_ptr_add(data, i)); }\
+ return flatcc_builder_end_vector(B); } else return flatcc_builder_create_vector(B, data, len, S, A, FLATBUFFERS_COUNT_MAX(S)); }\
+static inline N ## _vec_ref_t N ## _vec_clone(NS ## builder_t *B, N ##_vec_t vec)\
+{ __flatbuffers_memoize(B, vec, flatcc_builder_create_vector(B, vec, N ## _vec_len(vec), S, A, FLATBUFFERS_COUNT_MAX(S))); }\
+static inline N ## _vec_ref_t N ## _vec_slice(NS ## builder_t *B, N ##_vec_t vec, size_t index, size_t len)\
+{ size_t n = N ## _vec_len(vec); if (index >= n) index = n; n -= index; if (len > n) len = n;\
+ return flatcc_builder_create_vector(B, N ## __const_ptr_add(vec, index), len, S, A, FLATBUFFERS_COUNT_MAX(S)); }\
+__flatbuffers_build_vector_ops(NS, N ## _vec, N, N, T)
+
+#define __flatbuffers_build_union_vector_ops(NS, V, N, TN)\
+static inline TN ## _union_ref_t *V ## _extend(NS ## builder_t *B, size_t len)\
+{ return flatcc_builder_extend_union_vector(B, len); }\
+static inline TN ## _union_ref_t *V ## _append(NS ## builder_t *B, const TN ## _union_ref_t *data, size_t len)\
+{ return flatcc_builder_append_union_vector(B, data, len); }\
+static inline int V ## _truncate(NS ## builder_t *B, size_t len)\
+{ return flatcc_builder_truncate_union_vector(B, len); }\
+static inline TN ## _union_ref_t *V ## _edit(NS ## builder_t *B)\
+{ return (TN ## _union_ref_t *) flatcc_builder_union_vector_edit(B); }\
+static inline size_t V ## _reserved_len(NS ## builder_t *B)\
+{ return flatcc_builder_union_vector_count(B); }\
+static inline TN ## _union_ref_t *V ## _push(NS ## builder_t *B, const TN ## _union_ref_t ref)\
+{ return flatcc_builder_union_vector_push(B, ref); }\
+static inline TN ## _union_ref_t *V ## _push_clone(NS ## builder_t *B, TN ## _union_t u)\
+{ return TN ## _vec_push(B, TN ## _clone(B, u)); }
+
+#define __flatbuffers_build_union_vector(NS, N)\
+static inline int N ## _vec_start(NS ## builder_t *B)\
+{ return flatcc_builder_start_union_vector(B); }\
+static inline N ## _union_vec_ref_t N ## _vec_end(NS ## builder_t *B)\
+{ return flatcc_builder_end_union_vector(B); }\
+static inline N ## _union_vec_ref_t N ## _vec_create(NS ## builder_t *B, const N ## _union_ref_t *data, size_t len)\
+{ return flatcc_builder_create_union_vector(B, data, len); }\
+__flatbuffers_build_union_vector_ops(NS, N ## _vec, N, N)\
+/* Preserves DAG structure separately for type and value vector, so a type vector could be shared for many value vectors. */\
+static inline N ## _union_vec_ref_t N ## _vec_clone(NS ## builder_t *B, N ##_union_vec_t vec)\
+{ N ## _union_vec_ref_t _uvref, _ret = { 0, 0 }; NS ## union_ref_t _uref; size_t _i, _len;\
+ if (vec.type == 0) return _ret;\
+ _uvref.type = flatcc_builder_refmap_find(B, vec.type); _uvref.value = flatcc_builder_refmap_find(B, vec.value);\
+ _len = N ## _union_vec_len(vec); if (_uvref.type == 0) {\
+ _uvref.type = flatcc_builder_refmap_insert(B, vec.type, (flatcc_builder_create_type_vector(B, vec.type, _len))); }\
+ if (_uvref.type == 0) return _ret; if (_uvref.value == 0) {\
+ if (flatcc_builder_start_offset_vector(B)) return _ret;\
+ for (_i = 0; _i < _len; ++_i) { _uref = N ## _clone(B, N ## _union_vec_at(vec, _i));\
+ if (!_uref.value || !(flatcc_builder_offset_vector_push(B, _uref.value))) return _ret; }\
+ _uvref.value = flatcc_builder_refmap_insert(B, vec.value, flatcc_builder_end_offset_vector(B));\
+ if (_uvref.value == 0) return _ret; } return _uvref; }
+
+#define __flatbuffers_build_string_vector_ops(NS, N)\
+static inline int N ## _push_start(NS ## builder_t *B)\
+{ return NS ## string_start(B); }\
+static inline NS ## string_ref_t *N ## _push_end(NS ## builder_t *B)\
+{ return NS ## string_vec_push(B, NS ## string_end(B)); }\
+static inline NS ## string_ref_t *N ## _push_create(NS ## builder_t *B, const char *s, size_t len)\
+{ return NS ## string_vec_push(B, NS ## string_create(B, s, len)); }\
+static inline NS ## string_ref_t *N ## _push_create_str(NS ## builder_t *B, const char *s)\
+{ return NS ## string_vec_push(B, NS ## string_create_str(B, s)); }\
+static inline NS ## string_ref_t *N ## _push_create_strn(NS ## builder_t *B, const char *s, size_t max_len)\
+{ return NS ## string_vec_push(B, NS ## string_create_strn(B, s, max_len)); }\
+static inline NS ## string_ref_t *N ## _push_clone(NS ## builder_t *B, NS ## string_t string)\
+{ return NS ## string_vec_push(B, NS ## string_clone(B, string)); }\
+static inline NS ## string_ref_t *N ## _push_slice(NS ## builder_t *B, NS ## string_t string, size_t index, size_t len)\
+{ return NS ## string_vec_push(B, NS ## string_slice(B, string, index, len)); }
+
+#define __flatbuffers_build_table_vector_ops(NS, N, TN)\
+static inline int N ## _push_start(NS ## builder_t *B)\
+{ return TN ## _start(B); }\
+static inline TN ## _ref_t *N ## _push_end(NS ## builder_t *B)\
+{ return N ## _push(B, TN ## _end(B)); }\
+static inline TN ## _ref_t *N ## _push_create(NS ## builder_t *B __ ## TN ##_formal_args)\
+{ return N ## _push(B, TN ## _create(B __ ## TN ## _call_args)); }
+
+#define __flatbuffers_build_offset_vector_ops(NS, V, N, TN)\
+static inline TN ## _ref_t *V ## _extend(NS ## builder_t *B, size_t len)\
+{ return flatcc_builder_extend_offset_vector(B, len); }\
+static inline TN ## _ref_t *V ## _append(NS ## builder_t *B, const TN ## _ref_t *data, size_t len)\
+{ return flatcc_builder_append_offset_vector(B, data, len); }\
+static inline int V ## _truncate(NS ## builder_t *B, size_t len)\
+{ return flatcc_builder_truncate_offset_vector(B, len); }\
+static inline TN ## _ref_t *V ## _edit(NS ## builder_t *B)\
+{ return (TN ## _ref_t *)flatcc_builder_offset_vector_edit(B); }\
+static inline size_t V ## _reserved_len(NS ## builder_t *B)\
+{ return flatcc_builder_offset_vector_count(B); }\
+static inline TN ## _ref_t *V ## _push(NS ## builder_t *B, const TN ## _ref_t ref)\
+{ return ref ? flatcc_builder_offset_vector_push(B, ref) : 0; }
+
+#define __flatbuffers_build_offset_vector(NS, N)\
+typedef NS ## ref_t N ## _vec_ref_t;\
+static inline int N ## _vec_start(NS ## builder_t *B)\
+{ return flatcc_builder_start_offset_vector(B); }\
+static inline N ## _vec_ref_t N ## _vec_end(NS ## builder_t *B)\
+{ return flatcc_builder_end_offset_vector(B); }\
+static inline N ## _vec_ref_t N ## _vec_create(NS ## builder_t *B, const N ## _ref_t *data, size_t len)\
+{ return flatcc_builder_create_offset_vector(B, data, len); }\
+__flatbuffers_build_offset_vector_ops(NS, N ## _vec, N, N)\
+static inline N ## _vec_ref_t N ## _vec_clone(NS ## builder_t *B, N ##_vec_t vec)\
+{ int _ret; N ## _ref_t _e; size_t _i, _len; __flatbuffers_memoize_begin(B, vec);\
+ _len = N ## _vec_len(vec); if (flatcc_builder_start_offset_vector(B)) return 0;\
+ for (_i = 0; _i < _len; ++_i) { if (!(_e = N ## _clone(B, N ## _vec_at(vec, _i)))) return 0;\
+ if (!flatcc_builder_offset_vector_push(B, _e)) return 0; }\
+ __flatbuffers_memoize_end(B, vec, flatcc_builder_end_offset_vector(B)); }\
+
+#define __flatbuffers_build_string_ops(NS, N)\
+static inline char *N ## _append(NS ## builder_t *B, const char *s, size_t len)\
+{ return flatcc_builder_append_string(B, s, len); }\
+static inline char *N ## _append_str(NS ## builder_t *B, const char *s)\
+{ return flatcc_builder_append_string_str(B, s); }\
+static inline char *N ## _append_strn(NS ## builder_t *B, const char *s, size_t len)\
+{ return flatcc_builder_append_string_strn(B, s, len); }\
+static inline size_t N ## _reserved_len(NS ## builder_t *B)\
+{ return flatcc_builder_string_len(B); }\
+static inline char *N ## _extend(NS ## builder_t *B, size_t len)\
+{ return flatcc_builder_extend_string(B, len); }\
+static inline char *N ## _edit(NS ## builder_t *B)\
+{ return flatcc_builder_string_edit(B); }\
+static inline int N ## _truncate(NS ## builder_t *B, size_t len)\
+{ return flatcc_builder_truncate_string(B, len); }
+
+#define __flatbuffers_build_string(NS)\
+typedef NS ## ref_t NS ## string_ref_t;\
+static inline int NS ## string_start(NS ## builder_t *B)\
+{ return flatcc_builder_start_string(B); }\
+static inline NS ## string_ref_t NS ## string_end(NS ## builder_t *B)\
+{ return flatcc_builder_end_string(B); }\
+static inline NS ## ref_t NS ## string_create(NS ## builder_t *B, const char *s, size_t len)\
+{ return flatcc_builder_create_string(B, s, len); }\
+static inline NS ## ref_t NS ## string_create_str(NS ## builder_t *B, const char *s)\
+{ return flatcc_builder_create_string_str(B, s); }\
+static inline NS ## ref_t NS ## string_create_strn(NS ## builder_t *B, const char *s, size_t len)\
+{ return flatcc_builder_create_string_strn(B, s, len); }\
+static inline NS ## string_ref_t NS ## string_clone(NS ## builder_t *B, NS ## string_t string)\
+{ __flatbuffers_memoize(B, string, flatcc_builder_create_string(B, string, NS ## string_len(string))); }\
+static inline NS ## string_ref_t NS ## string_slice(NS ## builder_t *B, NS ## string_t string, size_t index, size_t len)\
+{ size_t n = NS ## string_len(string); if (index >= n) index = n; n -= index; if (len > n) len = n;\
+ return flatcc_builder_create_string(B, string + index, len); }\
+__flatbuffers_build_string_ops(NS, NS ## string)\
+__flatbuffers_build_offset_vector(NS, NS ## string)
+
+#define __flatbuffers_copy_from_pe(P, P2, N) (*(P) = N ## _read_from_pe(P2), (P))
+#define __flatbuffers_from_pe(P, N) (*(P) = N ## _read_from_pe(P), (P))
+#define __flatbuffers_copy_to_pe(P, P2, N) (N ## _write_to_pe((P), *(P2)), (P))
+#define __flatbuffers_to_pe(P, N) (N ## _write_to_pe((P), *(P)), (P))
+#define __flatbuffers_define_fixed_array_primitives(NS, N, T)\
+static inline T *N ## _array_copy(T *p, const T *p2, size_t n)\
+{ memcpy(p, p2, n * sizeof(T)); return p; }\
+static inline T *N ## _array_copy_from_pe(T *p, const T *p2, size_t n)\
+{ size_t i; if (NS ## is_native_pe()) memcpy(p, p2, n * sizeof(T)); else\
+ for (i = 0; i < n; ++i) N ## _copy_from_pe(&p[i], &p2[i]); return p; }\
+static inline T *N ## _array_copy_to_pe(T *p, const T *p2, size_t n)\
+{ size_t i; if (NS ## is_native_pe()) memcpy(p, p2, n * sizeof(T)); else\
+ for (i = 0; i < n; ++i) N ## _copy_to_pe(&p[i], &p2[i]); return p; }
+#define __flatbuffers_define_scalar_primitives(NS, N, T)\
+static inline T *N ## _from_pe(T *p) { return __ ## NS ## from_pe(p, N); }\
+static inline T *N ## _to_pe(T *p) { return __ ## NS ## to_pe(p, N); }\
+static inline T *N ## _copy(T *p, const T *p2) { *p = *p2; return p; }\
+static inline T *N ## _copy_from_pe(T *p, const T *p2)\
+{ return __ ## NS ## copy_from_pe(p, p2, N); }\
+static inline T *N ## _copy_to_pe(T *p, const T *p2) \
+{ return __ ## NS ## copy_to_pe(p, p2, N); }\
+static inline T *N ## _assign(T *p, const T v0) { *p = v0; return p; }\
+static inline T *N ## _assign_from_pe(T *p, T v0)\
+{ *p = N ## _read_from_pe(&v0); return p; }\
+static inline T *N ## _assign_to_pe(T *p, T v0)\
+{ N ## _write_to_pe(p, v0); return p; }
+#define __flatbuffers_build_scalar(NS, N, T)\
+__ ## NS ## define_scalar_primitives(NS, N, T)\
+__ ## NS ## define_fixed_array_primitives(NS, N, T)\
+__ ## NS ## build_vector(NS, N, T, sizeof(T), sizeof(T))
+/* Depends on generated copy_to/from_pe functions, and the type. */
+#define __flatbuffers_define_struct_primitives(NS, N)\
+static inline N ## _t *N ##_to_pe(N ## _t *p)\
+{ if (!NS ## is_native_pe()) { N ## _copy_to_pe(p, p); }; return p; }\
+static inline N ## _t *N ##_from_pe(N ## _t *p)\
+{ if (!NS ## is_native_pe()) { N ## _copy_from_pe(p, p); }; return p; }\
+static inline N ## _t *N ## _clear(N ## _t *p) { return (N ## _t *)memset(p, 0, N ## __size()); }
+
+/* Depends on generated copy/assign_to/from_pe functions, and the type. */
+#define __flatbuffers_build_struct(NS, N, S, A, FID, TFID)\
+__ ## NS ## define_struct_primitives(NS, N)\
+typedef NS ## ref_t N ## _ref_t;\
+static inline N ## _t *N ## _start(NS ## builder_t *B)\
+{ return (N ## _t *)flatcc_builder_start_struct(B, S, A); }\
+static inline N ## _ref_t N ## _end(NS ## builder_t *B)\
+{ if (!NS ## is_native_pe()) { N ## _to_pe((N ## _t *)flatcc_builder_struct_edit(B)); }\
+ return flatcc_builder_end_struct(B); }\
+static inline N ## _ref_t N ## _end_pe(NS ## builder_t *B)\
+{ return flatcc_builder_end_struct(B); }\
+static inline N ## _ref_t N ## _create(NS ## builder_t *B __ ## N ## _formal_args)\
+{ N ## _t *_p = N ## _start(B); if (!_p) return 0; N ##_assign_to_pe(_p __ ## N ## _call_args);\
+ return N ## _end_pe(B); }\
+static inline N ## _ref_t N ## _clone(NS ## builder_t *B, N ## _struct_t p)\
+{ N ## _t *_p; __flatbuffers_memoize_begin(B, p); _p = N ## _start(B); if (!_p) return 0;\
+ N ## _copy(_p, p); __flatbuffers_memoize_end(B, p, N ##_end_pe(B)); }\
+__flatbuffers_build_vector(NS, N, N ## _t, S, A)\
+__flatbuffers_build_struct_root(NS, N, A, FID, TFID)\
+
+#define __flatbuffers_struct_clear_field(p) memset((p), 0, sizeof(*(p)))
+#define __flatbuffers_build_table(NS, N, K)\
+static inline int N ## _start(NS ## builder_t *B)\
+{ return flatcc_builder_start_table(B, K); }\
+static inline N ## _ref_t N ## _end(NS ## builder_t *B)\
+{ FLATCC_ASSERT(flatcc_builder_check_required(B, __ ## N ## _required,\
+ sizeof(__ ## N ## _required) / sizeof(__ ## N ## _required[0]) - 1));\
+ return flatcc_builder_end_table(B); }\
+__flatbuffers_build_offset_vector(NS, N)
+
+#define __flatbuffers_build_table_field(ID, NS, N, TN, TT)\
+static inline int N ## _add(NS ## builder_t *B, TN ## _ref_t ref)\
+{ TN ## _ref_t *_p; return (ref && (_p = flatcc_builder_table_add_offset(B, ID))) ?\
+ ((*_p = ref), 0) : -1; }\
+static inline int N ## _start(NS ## builder_t *B)\
+{ return TN ## _start(B); }\
+static inline int N ## _end(NS ## builder_t *B)\
+{ return N ## _add(B, TN ## _end(B)); }\
+static inline TN ## _ref_t N ## _create(NS ## builder_t *B __ ## TN ##_formal_args)\
+{ return N ## _add(B, TN ## _create(B __ ## TN ## _call_args)); }\
+static inline int N ## _clone(NS ## builder_t *B, TN ## _table_t p)\
+{ return N ## _add(B, TN ## _clone(B, p)); }\
+static inline int N ## _pick(NS ## builder_t *B, TT ## _table_t t)\
+{ TN ## _table_t _p = N ## _get(t); return _p ? N ## _clone(B, _p) : 0; }
+
+#define __flatbuffers_build_union_field(ID, NS, N, TN, TT)\
+static inline int N ## _add(NS ## builder_t *B, TN ## _union_ref_t uref)\
+{ NS ## ref_t *_p; TN ## _union_type_t *_pt; if (uref.type == TN ## _NONE) return 0; if (uref.value == 0) return -1;\
+ if (!(_pt = (TN ## _union_type_t *)flatcc_builder_table_add(B, ID - 1, sizeof(*_pt), sizeof(*_pt)))) return -1;\
+ *_pt = uref.type; if (!(_p = flatcc_builder_table_add_offset(B, ID))) return -1; *_p = uref.value; return 0; }\
+static inline int N ## _add_type(NS ## builder_t *B, TN ## _union_type_t type)\
+{ TN ## _union_type_t *_pt; if (type == TN ## _NONE) return 0; return (_pt = (TN ## _union_type_t *)flatcc_builder_table_add(B, ID - 1,\
+ sizeof(*_pt), sizeof(*_pt))) ? ((*_pt = type), 0) : -1; }\
+static inline int N ## _add_value(NS ## builder_t *B, TN ## _union_ref_t uref)\
+{ NS ## ref_t *p; if (uref.type == TN ## _NONE) return 0; return (p = flatcc_builder_table_add_offset(B, ID)) ?\
+ ((*p = uref.value), 0) : -1; }\
+static inline int N ## _clone(NS ## builder_t *B, TN ## _union_t p)\
+{ return N ## _add(B, TN ## _clone(B, p)); }\
+static inline int N ## _pick(NS ## builder_t *B, TT ## _table_t t)\
+{ TN ## _union_t _p = N ## _union(t); return _p.type ? N ## _clone(B, _p) : 0; }
+
+/* M is the union value name and T is its type, i.e. the qualified name. */
+#define __flatbuffers_build_union_table_value_field(NS, N, NU, M, T)\
+static inline int N ## _ ## M ## _add(NS ## builder_t *B, T ## _ref_t ref)\
+{ return N ## _add(B, NU ## _as_ ## M (ref)); }\
+static inline int N ## _ ## M ## _start(NS ## builder_t *B)\
+{ return T ## _start(B); }\
+static inline int N ## _ ## M ## _end(NS ## builder_t *B)\
+{ T ## _ref_t ref = T ## _end(B);\
+ return ref ? N ## _ ## M ## _add(B, ref) : -1; }\
+static inline int N ## _ ## M ## _create(NS ## builder_t *B __ ## T ##_formal_args)\
+{ T ## _ref_t ref = T ## _create(B __ ## T ## _call_args);\
+ return ref ? N ## _add(B, NU ## _as_ ## M(ref)) : -1; }\
+static inline int N ## _ ## M ## _clone(NS ## builder_t *B, T ## _table_t t)\
+{ T ## _ref_t ref = T ## _clone(B, t);\
+ return ref ? N ## _add(B, NU ## _as_ ## M(ref)) : -1; }
+
+/* M is the union value name and T is its type, i.e. the qualified name. */
+#define __flatbuffers_build_union_struct_value_field(NS, N, NU, M, T)\
+static inline int N ## _ ## M ## _add(NS ## builder_t *B, T ## _ref_t ref)\
+{ return N ## _add(B, NU ## _as_ ## M (ref)); }\
+static inline T ## _t *N ## _ ## M ## _start(NS ## builder_t *B)\
+{ return T ## _start(B); }\
+static inline int N ## _ ## M ## _end(NS ## builder_t *B)\
+{ T ## _ref_t ref = T ## _end(B);\
+ return ref ? N ## _ ## M ## _add(B, ref) : -1; }\
+static inline int N ## _ ## M ## _create(NS ## builder_t *B __ ## T ##_formal_args)\
+{ T ## _ref_t ref = T ## _create(B __ ## T ## _call_args);\
+ return ref ? N ## _add(B, NU ## _as_ ## M(ref)) : -1; }\
+static inline int N ## _ ## M ## _end_pe(NS ## builder_t *B)\
+{ T ## _ref_t ref = T ## _end_pe(B);\
+ return ref ? N ## _add(B, NU ## _as_ ## M(ref)) : -1; }\
+static inline int N ## _ ## M ## _clone(NS ## builder_t *B, T ## _struct_t p)\
+{ T ## _ref_t ref = T ## _clone(B, p);\
+ return ref ? N ## _add(B, NU ## _as_ ## M(ref)) : -1; }
+#define __flatbuffers_build_union_string_value_field(NS, N, NU, M)\
+static inline int N ## _ ## M ## _add(NS ## builder_t *B, NS ## string_ref_t ref)\
+{ return N ## _add(B, NU ## _as_ ## M (ref)); }\
+__flatbuffers_build_string_field_ops(NS, N ## _ ## M)
+
+/* NS: common namespace, ID: table field id (not offset), TN: name of type T, TT: name of table type
+ * S: sizeof of scalar type, A: alignment of type T, default value V of type T. */
+#define __flatbuffers_build_scalar_field(ID, NS, N, TN, T, S, A, V, TT)\
+static inline int N ## _add(NS ## builder_t *B, const T v)\
+{ T *_p; if (v == V) return 0; if (!(_p = (T *)flatcc_builder_table_add(B, ID, S, A))) return -1;\
+ TN ## _assign_to_pe(_p, v); return 0; }\
+static inline int N ## _force_add(NS ## builder_t *B, const T v)\
+{ T *_p; if (!(_p = (T *)flatcc_builder_table_add(B, ID, S, A))) return -1;\
+ TN ## _assign_to_pe(_p, v); return 0; }\
+/* Clone does not skip default values and expects pe endian content. */\
+static inline int N ## _clone(NS ## builder_t *B, const T *p)\
+{ return 0 == flatcc_builder_table_add_copy(B, ID, p, S, A) ? -1 : 0; }\
+/* Transferring a missing field is a nop success with 0 as result. */\
+static inline int N ## _pick(NS ## builder_t *B, TT ## _table_t t)\
+{ const T *_p = N ## _get_ptr(t); return _p ? N ## _clone(B, _p) : 0; }
+
+/* NS: common namespace, ID: table field id (not offset), TN: name of type T, TT: name of table type
+ * S: sizeof of scalar type, A: alignment of type T. */
+#define __flatbuffers_build_scalar_optional_field(ID, NS, N, TN, T, S, A, TT)\
+static inline int N ## _add(NS ## builder_t *B, const T v)\
+{ T *_p; if (!(_p = (T *)flatcc_builder_table_add(B, ID, S, A))) return -1;\
+ TN ## _assign_to_pe(_p, v); return 0; }\
+/* Clone does not skip default values and expects pe endian content. */\
+static inline int N ## _clone(NS ## builder_t *B, const T *p)\
+{ return 0 == flatcc_builder_table_add_copy(B, ID, p, S, A) ? -1 : 0; }\
+/* Transferring a missing field is a nop success with 0 as result. */\
+static inline int N ## _pick(NS ## builder_t *B, TT ## _table_t t)\
+{ const T *_p = N ## _get_ptr(t); return _p ? N ## _clone(B, _p) : 0; }
+
+#define __flatbuffers_build_struct_field(ID, NS, N, TN, S, A, TT)\
+static inline TN ## _t *N ## _start(NS ## builder_t *B)\
+{ return (TN ## _t *)flatcc_builder_table_add(B, ID, S, A); }\
+static inline int N ## _end(NS ## builder_t *B)\
+{ if (!NS ## is_native_pe()) { TN ## _to_pe((TN ## _t *)flatcc_builder_table_edit(B, S)); } return 0; }\
+static inline int N ## _end_pe(NS ## builder_t *B) { return 0; }\
+static inline int N ## _create(NS ## builder_t *B __ ## TN ## _formal_args)\
+{ TN ## _t *_p = N ## _start(B); if (!_p) return -1; TN ##_assign_to_pe(_p __ ## TN ## _call_args);\
+ return 0; }\
+static inline int N ## _add(NS ## builder_t *B, const TN ## _t *p)\
+{ TN ## _t *_p = N ## _start(B); if (!_p) return -1; TN ##_copy_to_pe(_p, p); return 0; }\
+static inline int N ## _clone(NS ## builder_t *B, TN ## _struct_t p)\
+{ return 0 == flatcc_builder_table_add_copy(B, ID, p, S, A) ? -1 : 0; }\
+static inline int N ## _pick(NS ## builder_t *B, TT ## _table_t t)\
+{ TN ## _struct_t _p = N ## _get(t); return _p ? N ## _clone(B, _p) : 0; }
+
+#define __flatbuffers_build_vector_field(ID, NS, N, TN, T, TT)\
+static inline int N ## _add(NS ## builder_t *B, TN ## _vec_ref_t ref)\
+{ TN ## _vec_ref_t *_p; return (ref && (_p = flatcc_builder_table_add_offset(B, ID))) ? ((*_p = ref), 0) : -1; }\
+static inline int N ## _start(NS ## builder_t *B)\
+{ return TN ## _vec_start(B); }\
+static inline int N ## _end_pe(NS ## builder_t *B)\
+{ return N ## _add(B, TN ## _vec_end_pe(B)); }\
+static inline int N ## _end(NS ## builder_t *B)\
+{ return N ## _add(B, TN ## _vec_end(B)); }\
+static inline int N ## _create_pe(NS ## builder_t *B, const T *data, size_t len)\
+{ return N ## _add(B, TN ## _vec_create_pe(B, data, len)); }\
+static inline int N ## _create(NS ## builder_t *B, const T *data, size_t len)\
+{ return N ## _add(B, TN ## _vec_create(B, data, len)); }\
+static inline int N ## _slice(NS ## builder_t *B, TN ## _vec_t vec, size_t index, size_t len)\
+{ return N ## _add(B, TN ## _vec_slice(B, vec, index, len)); }\
+static inline int N ## _clone(NS ## builder_t *B, TN ## _vec_t vec)\
+{ return N ## _add(B, TN ## _vec_clone(B, vec)); }\
+static inline int N ## _pick(NS ## builder_t *B, TT ## _table_t t)\
+{ TN ## _vec_t _p = N ## _get(t); return _p ? N ## _clone(B, _p) : 0; }\
+__flatbuffers_build_vector_ops(NS, N, N, TN, T)\
+
+#define __flatbuffers_build_offset_vector_field(ID, NS, N, TN, TT)\
+static inline int N ## _add(NS ## builder_t *B, TN ## _vec_ref_t ref)\
+{ TN ## _vec_ref_t *_p; return (ref && (_p = flatcc_builder_table_add_offset(B, ID))) ? ((*_p = ref), 0) : -1; }\
+static inline int N ## _start(NS ## builder_t *B)\
+{ return flatcc_builder_start_offset_vector(B); }\
+static inline int N ## _end(NS ## builder_t *B)\
+{ return N ## _add(B, flatcc_builder_end_offset_vector(B)); }\
+static inline int N ## _create(NS ## builder_t *B, const TN ## _ref_t *data, size_t len)\
+{ return N ## _add(B, flatcc_builder_create_offset_vector(B, data, len)); }\
+__flatbuffers_build_offset_vector_ops(NS, N, N, TN)\
+static inline int N ## _clone(NS ## builder_t *B, TN ## _vec_t vec)\
+{ return N ## _add(B, TN ## _vec_clone(B, vec)); }\
+static inline int N ## _pick(NS ## builder_t *B, TT ## _table_t t)\
+{ TN ## _vec_t _p = N ## _get(t); return _p ? N ## _clone(B, _p) : 0; }
+
+/* depends on N ## _add which differs for union member fields and ordinary fields */\
+#define __flatbuffers_build_string_field_ops(NS, N)\
+static inline int N ## _start(NS ## builder_t *B)\
+{ return flatcc_builder_start_string(B); }\
+static inline int N ## _end(NS ## builder_t *B)\
+{ return N ## _add(B, flatcc_builder_end_string(B)); }\
+static inline int N ## _create(NS ## builder_t *B, const char *s, size_t len)\
+{ return N ## _add(B, flatcc_builder_create_string(B, s, len)); }\
+static inline int N ## _create_str(NS ## builder_t *B, const char *s)\
+{ return N ## _add(B, flatcc_builder_create_string_str(B, s)); }\
+static inline int N ## _create_strn(NS ## builder_t *B, const char *s, size_t max_len)\
+{ return N ## _add(B, flatcc_builder_create_string_strn(B, s, max_len)); }\
+static inline int N ## _clone(NS ## builder_t *B, NS ## string_t string)\
+{ return N ## _add(B, NS ## string_clone(B, string)); }\
+static inline int N ## _slice(NS ## builder_t *B, NS ## string_t string, size_t index, size_t len)\
+{ return N ## _add(B, NS ## string_slice(B, string, index, len)); }\
+__flatbuffers_build_string_ops(NS, N)
+
+#define __flatbuffers_build_string_field(ID, NS, N, TT)\
+static inline int N ## _add(NS ## builder_t *B, NS ## string_ref_t ref)\
+{ NS ## string_ref_t *_p; return (ref && (_p = flatcc_builder_table_add_offset(B, ID))) ? ((*_p = ref), 0) : -1; }\
+__flatbuffers_build_string_field_ops(NS, N)\
+static inline int N ## _pick(NS ## builder_t *B, TT ## _table_t t)\
+{ NS ## string_t _p = N ## _get(t); return _p ? N ## _clone(B, _p) : 0; }
+
+#define __flatbuffers_build_table_vector_field(ID, NS, N, TN, TT)\
+__flatbuffers_build_offset_vector_field(ID, NS, N, TN, TT)\
+__flatbuffers_build_table_vector_ops(NS, N, TN)
+
+#define __flatbuffers_build_union_vector_field(ID, NS, N, TN, TT)\
+static inline int N ## _add(NS ## builder_t *B, TN ## _union_vec_ref_t uvref)\
+{ NS ## vec_ref_t *_p; if (!uvref.type || !uvref.value) return uvref.type == uvref.value ? 0 : -1;\
+ if (!(_p = flatcc_builder_table_add_offset(B, ID - 1))) return -1; *_p = uvref.type;\
+ if (!(_p = flatcc_builder_table_add_offset(B, ID))) return -1; *_p = uvref.value; return 0; }\
+static inline int N ## _start(NS ## builder_t *B)\
+{ return flatcc_builder_start_union_vector(B); }\
+static inline int N ## _end(NS ## builder_t *B)\
+{ return N ## _add(B, flatcc_builder_end_union_vector(B)); }\
+static inline int N ## _create(NS ## builder_t *B, const TN ## _union_ref_t *data, size_t len)\
+{ return N ## _add(B, flatcc_builder_create_union_vector(B, data, len)); }\
+__flatbuffers_build_union_vector_ops(NS, N, N, TN)\
+static inline int N ## _clone(NS ## builder_t *B, TN ## _union_vec_t vec)\
+{ return N ## _add(B, TN ## _vec_clone(B, vec)); }\
+static inline int N ## _pick(NS ## builder_t *B, TT ## _table_t t)\
+{ TN ## _union_vec_t _p = N ## _union(t); return _p.type ? N ## _clone(B, _p) : 0; }
+
+#define __flatbuffers_build_union_table_vector_value_field(NS, N, NU, M, T)\
+static inline int N ## _ ## M ## _push_start(NS ## builder_t *B)\
+{ return T ## _start(B); }\
+static inline NU ## _union_ref_t *N ## _ ## M ## _push_end(NS ## builder_t *B)\
+{ return NU ## _vec_push(B, NU ## _as_ ## M (T ## _end(B))); }\
+static inline NU ## _union_ref_t *N ## _ ## M ## _push(NS ## builder_t *B, T ## _ref_t ref)\
+{ return NU ## _vec_push(B, NU ## _as_ ## M (ref)); }\
+static inline NU ## _union_ref_t *N ## _ ## M ## _push_create(NS ## builder_t *B __ ## T ##_formal_args)\
+{ return NU ## _vec_push(B, NU ## _as_ ## M(T ## _create(B __ ## T ## _call_args))); }\
+static inline NU ## _union_ref_t *N ## _ ## M ## _push_clone(NS ## builder_t *B, T ## _table_t t)\
+{ return NU ## _vec_push(B, NU ## _as_ ## M(T ## _clone(B, t))); }
+
+#define __flatbuffers_build_union_struct_vector_value_field(NS, N, NU, M, T)\
+static inline T ## _t *N ## _ ## M ## _push_start(NS ## builder_t *B)\
+{ return T ## _start(B); }\
+static inline NU ## _union_ref_t *N ## _ ## M ## _push_end(NS ## builder_t *B)\
+{ return NU ## _vec_push(B, NU ## _as_ ## M (T ## _end(B))); }\
+static inline NU ## _union_ref_t *N ## _ ## M ## _push(NS ## builder_t *B, T ## _ref_t ref)\
+{ return NU ## _vec_push(B, NU ## _as_ ## M (ref)); }\
+static inline NU ## _union_ref_t *N ## _ ## M ## _push_create(NS ## builder_t *B __ ## T ##_formal_args)\
+{ return NU ## _vec_push(B, NU ## _as_ ## M(T ## _create(B __ ## T ## _call_args))); }\
+static inline NU ## _union_ref_t *N ## _ ## M ## _push_clone(NS ## builder_t *B, T ## _struct_t p)\
+{ return NU ## _vec_push(B, NU ## _as_ ## M(T ## _clone(B, p))); }
+
+#define __flatbuffers_build_union_string_vector_value_field(NS, N, NU, M)\
+static inline NU ## _union_ref_t *N ## _ ## M ## _push(NS ## builder_t *B, NS ## string_ref_t ref)\
+{ return NU ## _vec_push(B, NU ## _as_ ## M (ref)); }\
+static inline int N ## _ ## M ## _push_start(NS ## builder_t *B)\
+{ return NS ## string_start(B); }\
+static inline NU ## _union_ref_t *N ## _ ## M ## _push_end(NS ## builder_t *B)\
+{ return NU ## _vec_push(B, NU ## _as_ ## M(NS ## string_end(B))); }\
+static inline NU ## _union_ref_t *N ## _ ## M ## _push_create(NS ## builder_t *B, const char *s, size_t len)\
+{ return NU ## _vec_push(B, NU ## _as_ ## M(NS ## string_create(B, s, len))); }\
+static inline NU ## _union_ref_t *N ## _ ## M ## _push_create_str(NS ## builder_t *B, const char *s)\
+{ return NU ## _vec_push(B, NU ## _as_ ## M(NS ## string_create_str(B, s))); }\
+static inline NU ## _union_ref_t *N ## _ ## M ## _push_create_strn(NS ## builder_t *B, const char *s, size_t max_len)\
+{ return NU ## _vec_push(B, NU ## _as_ ## M(NS ## string_create_strn(B, s, max_len))); }\
+static inline NU ## _union_ref_t *N ## _ ## M ## _push_clone(NS ## builder_t *B, NS ## string_t string)\
+{ return NU ## _vec_push(B, NU ## _as_ ## M(NS ## string_clone(B, string))); }\
+static inline NU ## _union_ref_t *N ## _ ## M ## _push_slice(NS ## builder_t *B, NS ## string_t string, size_t index, size_t len)\
+{ return NU ## _vec_push(B, NU ## _as_ ## M(NS ## string_slice(B, string, index, len))); }
+
+#define __flatbuffers_build_string_vector_field(ID, NS, N, TT)\
+__flatbuffers_build_offset_vector_field(ID, NS, N, NS ## string, TT)\
+__flatbuffers_build_string_vector_ops(NS, N)
+
+#define __flatbuffers_char_formal_args , char v0
+#define __flatbuffers_char_call_args , v0
+#define __flatbuffers_uint8_formal_args , uint8_t v0
+#define __flatbuffers_uint8_call_args , v0
+#define __flatbuffers_int8_formal_args , int8_t v0
+#define __flatbuffers_int8_call_args , v0
+#define __flatbuffers_bool_formal_args , flatbuffers_bool_t v0
+#define __flatbuffers_bool_call_args , v0
+#define __flatbuffers_uint16_formal_args , uint16_t v0
+#define __flatbuffers_uint16_call_args , v0
+#define __flatbuffers_uint32_formal_args , uint32_t v0
+#define __flatbuffers_uint32_call_args , v0
+#define __flatbuffers_uint64_formal_args , uint64_t v0
+#define __flatbuffers_uint64_call_args , v0
+#define __flatbuffers_int16_formal_args , int16_t v0
+#define __flatbuffers_int16_call_args , v0
+#define __flatbuffers_int32_formal_args , int32_t v0
+#define __flatbuffers_int32_call_args , v0
+#define __flatbuffers_int64_formal_args , int64_t v0
+#define __flatbuffers_int64_call_args , v0
+#define __flatbuffers_float_formal_args , float v0
+#define __flatbuffers_float_call_args , v0
+#define __flatbuffers_double_formal_args , double v0
+#define __flatbuffers_double_call_args , v0
+
+__flatbuffers_build_scalar(flatbuffers_, flatbuffers_char, char)
+__flatbuffers_build_scalar(flatbuffers_, flatbuffers_uint8, uint8_t)
+__flatbuffers_build_scalar(flatbuffers_, flatbuffers_int8, int8_t)
+__flatbuffers_build_scalar(flatbuffers_, flatbuffers_bool, flatbuffers_bool_t)
+__flatbuffers_build_scalar(flatbuffers_, flatbuffers_uint16, uint16_t)
+__flatbuffers_build_scalar(flatbuffers_, flatbuffers_uint32, uint32_t)
+__flatbuffers_build_scalar(flatbuffers_, flatbuffers_uint64, uint64_t)
+__flatbuffers_build_scalar(flatbuffers_, flatbuffers_int16, int16_t)
+__flatbuffers_build_scalar(flatbuffers_, flatbuffers_int32, int32_t)
+__flatbuffers_build_scalar(flatbuffers_, flatbuffers_int64, int64_t)
+__flatbuffers_build_scalar(flatbuffers_, flatbuffers_float, float)
+__flatbuffers_build_scalar(flatbuffers_, flatbuffers_double, double)
+
+__flatbuffers_build_string(flatbuffers_)
+
+__flatbuffers_build_buffer(flatbuffers_)
+#include "flatcc/flatcc_epilogue.h"
+#endif /* FLATBUFFERS_COMMON_BUILDER_H */
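For orientation, a minimal sketch of how the table and field macros above are driven once a generated header instantiates them. The names come from monster_builder.h later in this patch; values are illustrative and error handling is omitted.

    #include <stdlib.h>
    #include "monster_builder.h"   /* generated header added further down */

    static void build_example(void)
    {
        flatcc_builder_t builder, *B = &builder;
        size_t size;
        void *buf;

        flatcc_builder_init(B);
        MyGame_Sample_Weapon_start_as_root(B);           /* table prolog: *_start_as_root/_end_as_root */
        MyGame_Sample_Weapon_name_create_str(B, "Axe");  /* string field ops: *_create_str */
        MyGame_Sample_Weapon_damage_add(B, 5);           /* scalar field: *_add */
        MyGame_Sample_Weapon_end_as_root(B);
        buf = flatcc_builder_finalize_buffer(B, &size);
        /* ... hand buf/size to the transport of choice ... */
        free(buf);
        flatcc_builder_clear(B);
    }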
diff --git a/examples/flatbuffers_common_reader.h b/examples/flatbuffers_common_reader.h
new file mode 100644
index 0000000..2c5eb43
--- /dev/null
+++ b/examples/flatbuffers_common_reader.h
@@ -0,0 +1,578 @@
+#ifndef FLATBUFFERS_COMMON_READER_H
+#define FLATBUFFERS_COMMON_READER_H
+
+/* Generated by flatcc 0.6.2 FlatBuffers schema compiler for C by dvide.com */
+
+/* Common FlatBuffers read functionality for C. */
+
+#include "flatcc/flatcc_prologue.h"
+#include "flatcc/flatcc_flatbuffers.h"
+
+
+#define __flatbuffers_read_scalar_at_byteoffset(N, p, o) N ## _read_from_pe((uint8_t *)(p) + (o))
+#define __flatbuffers_read_scalar(N, p) N ## _read_from_pe(p)
+#define __flatbuffers_read_vt(ID, offset, t)\
+flatbuffers_voffset_t offset = 0;\
+{ flatbuffers_voffset_t id__tmp, *vt__tmp;\
+ FLATCC_ASSERT(t != 0 && "null pointer table access");\
+ id__tmp = ID;\
+ vt__tmp = (flatbuffers_voffset_t *)((uint8_t *)(t) -\
+ __flatbuffers_soffset_read_from_pe(t));\
+ if (__flatbuffers_voffset_read_from_pe(vt__tmp) >= sizeof(vt__tmp[0]) * (id__tmp + 3u)) {\
+ offset = __flatbuffers_voffset_read_from_pe(vt__tmp + id__tmp + 2);\
+ }\
+}
+#define __flatbuffers_field_present(ID, t) { __flatbuffers_read_vt(ID, offset__tmp, t) return offset__tmp != 0; }
+#define __flatbuffers_scalar_field(T, ID, t)\
+{\
+ __flatbuffers_read_vt(ID, offset__tmp, t)\
+ if (offset__tmp) {\
+ return (const T *)((uint8_t *)(t) + offset__tmp);\
+ }\
+ return 0;\
+}
+#define __flatbuffers_define_scalar_field(ID, N, NK, TK, T, V)\
+static inline T N ## _ ## NK ## _get(N ## _table_t t__tmp)\
+{ __flatbuffers_read_vt(ID, offset__tmp, t__tmp)\
+ return offset__tmp ? __flatbuffers_read_scalar_at_byteoffset(TK, t__tmp, offset__tmp) : V;\
+}\
+static inline T N ## _ ## NK(N ## _table_t t__tmp)\
+{ __flatbuffers_read_vt(ID, offset__tmp, t__tmp)\
+ return offset__tmp ? __flatbuffers_read_scalar_at_byteoffset(TK, t__tmp, offset__tmp) : V;\
+}\
+static inline const T *N ## _ ## NK ## _get_ptr(N ## _table_t t__tmp)\
+__flatbuffers_scalar_field(T, ID, t__tmp)\
+static inline int N ## _ ## NK ## _is_present(N ## _table_t t__tmp)\
+__flatbuffers_field_present(ID, t__tmp)\
+__flatbuffers_define_scan_by_scalar_field(N, NK, T)
+#define __flatbuffers_define_scalar_optional_field(ID, N, NK, TK, T, V)\
+__flatbuffers_define_scalar_field(ID, N, NK, TK, T, V)\
+static inline TK ## _option_t N ## _ ## NK ## _option(N ## _table_t t__tmp)\
+{ TK ## _option_t ret; __flatbuffers_read_vt(ID, offset__tmp, t__tmp)\
+ ret.is_null = offset__tmp == 0; ret.value = offset__tmp ?\
+ __flatbuffers_read_scalar_at_byteoffset(TK, t__tmp, offset__tmp) : V;\
+ return ret; }
+#define __flatbuffers_struct_field(T, ID, t, r)\
+{\
+ __flatbuffers_read_vt(ID, offset__tmp, t)\
+ if (offset__tmp) {\
+ return (T)((uint8_t *)(t) + offset__tmp);\
+ }\
+ FLATCC_ASSERT(!(r) && "required field missing");\
+ return 0;\
+}
+#define __flatbuffers_offset_field(T, ID, t, r, adjust)\
+{\
+ flatbuffers_uoffset_t *elem__tmp;\
+ __flatbuffers_read_vt(ID, offset__tmp, t)\
+ if (offset__tmp) {\
+ elem__tmp = (flatbuffers_uoffset_t *)((uint8_t *)(t) + offset__tmp);\
+ /* Add sizeof so C api can have raw access past header field. */\
+ return (T)((uint8_t *)(elem__tmp) + adjust +\
+ __flatbuffers_uoffset_read_from_pe(elem__tmp));\
+ }\
+ FLATCC_ASSERT(!(r) && "required field missing");\
+ return 0;\
+}
+#define __flatbuffers_vector_field(T, ID, t, r) __flatbuffers_offset_field(T, ID, t, r, sizeof(flatbuffers_uoffset_t))
+#define __flatbuffers_table_field(T, ID, t, r) __flatbuffers_offset_field(T, ID, t, r, 0)
+#define __flatbuffers_define_struct_field(ID, N, NK, T, r)\
+static inline T N ## _ ## NK ## _get(N ## _table_t t__tmp)\
+__flatbuffers_struct_field(T, ID, t__tmp, r)\
+static inline T N ## _ ## NK(N ## _table_t t__tmp)\
+__flatbuffers_struct_field(T, ID, t__tmp, r)\
+static inline int N ## _ ## NK ## _is_present(N ## _table_t t__tmp)\
+__flatbuffers_field_present(ID, t__tmp)
+#define __flatbuffers_define_vector_field(ID, N, NK, T, r)\
+static inline T N ## _ ## NK ## _get(N ## _table_t t__tmp)\
+__flatbuffers_vector_field(T, ID, t__tmp, r)\
+static inline T N ## _ ## NK(N ## _table_t t__tmp)\
+__flatbuffers_vector_field(T, ID, t__tmp, r)\
+static inline int N ## _ ## NK ## _is_present(N ## _table_t t__tmp)\
+__flatbuffers_field_present(ID, t__tmp)
+#define __flatbuffers_define_table_field(ID, N, NK, T, r)\
+static inline T N ## _ ## NK ## _get(N ## _table_t t__tmp)\
+__flatbuffers_table_field(T, ID, t__tmp, r)\
+static inline T N ## _ ## NK(N ## _table_t t__tmp)\
+__flatbuffers_table_field(T, ID, t__tmp, r)\
+static inline int N ## _ ## NK ## _is_present(N ## _table_t t__tmp)\
+__flatbuffers_field_present(ID, t__tmp)
+#define __flatbuffers_define_string_field(ID, N, NK, r)\
+static inline flatbuffers_string_t N ## _ ## NK ## _get(N ## _table_t t__tmp)\
+__flatbuffers_vector_field(flatbuffers_string_t, ID, t__tmp, r)\
+static inline flatbuffers_string_t N ## _ ## NK(N ## _table_t t__tmp)\
+__flatbuffers_vector_field(flatbuffers_string_t, ID, t__tmp, r)\
+static inline int N ## _ ## NK ## _is_present(N ## _table_t t__tmp)\
+__flatbuffers_field_present(ID, t__tmp)\
+__flatbuffers_define_scan_by_string_field(N, NK)
+#define __flatbuffers_vec_len(vec)\
+{ return (vec) ? (size_t)__flatbuffers_uoffset_read_from_pe((flatbuffers_uoffset_t *)vec - 1) : 0; }
+#define __flatbuffers_string_len(s) __flatbuffers_vec_len(s)
+static inline size_t flatbuffers_vec_len(const void *vec)
+__flatbuffers_vec_len(vec)
+#define __flatbuffers_scalar_vec_at(N, vec, i)\
+{ FLATCC_ASSERT(flatbuffers_vec_len(vec) > (i) && "index out of range");\
+ return __flatbuffers_read_scalar(N, &(vec)[i]); }
+#define __flatbuffers_struct_vec_at(vec, i)\
+{ FLATCC_ASSERT(flatbuffers_vec_len(vec) > (i) && "index out of range"); return (vec) + (i); }
+/* `adjust` skips past the header for string vectors. */
+#define __flatbuffers_offset_vec_at(T, vec, i, adjust)\
+{ const flatbuffers_uoffset_t *elem__tmp = (vec) + (i);\
+ FLATCC_ASSERT(flatbuffers_vec_len(vec) > (i) && "index out of range");\
+ return (T)((uint8_t *)(elem__tmp) + (size_t)__flatbuffers_uoffset_read_from_pe(elem__tmp) + (adjust)); }
+#define __flatbuffers_define_scalar_vec_len(N)\
+static inline size_t N ## _vec_len(N ##_vec_t vec__tmp)\
+{ return flatbuffers_vec_len(vec__tmp); }
+#define __flatbuffers_define_scalar_vec_at(N, T) \
+static inline T N ## _vec_at(N ## _vec_t vec__tmp, size_t i__tmp)\
+__flatbuffers_scalar_vec_at(N, vec__tmp, i__tmp)
+typedef const char *flatbuffers_string_t;
+static inline size_t flatbuffers_string_len(flatbuffers_string_t s)
+__flatbuffers_string_len(s)
+typedef const flatbuffers_uoffset_t *flatbuffers_string_vec_t;
+typedef flatbuffers_uoffset_t *flatbuffers_string_mutable_vec_t;
+static inline size_t flatbuffers_string_vec_len(flatbuffers_string_vec_t vec)
+__flatbuffers_vec_len(vec)
+static inline flatbuffers_string_t flatbuffers_string_vec_at(flatbuffers_string_vec_t vec, size_t i)
+__flatbuffers_offset_vec_at(flatbuffers_string_t, vec, i, sizeof(vec[0]))
+typedef const void *flatbuffers_generic_t;
+typedef void *flatbuffers_mutable_generic_t;
+static inline flatbuffers_string_t flatbuffers_string_cast_from_generic(const flatbuffers_generic_t p)
+{ return p ? ((const char *)p) + __flatbuffers_uoffset__size() : 0; }
+typedef const flatbuffers_uoffset_t *flatbuffers_generic_vec_t;
+typedef flatbuffers_uoffset_t *flatbuffers_generic_table_mutable_vec_t;
+static inline size_t flatbuffers_generic_vec_len(flatbuffers_generic_vec_t vec)
+__flatbuffers_vec_len(vec)
+static inline flatbuffers_generic_t flatbuffers_generic_vec_at(flatbuffers_generic_vec_t vec, size_t i)
+__flatbuffers_offset_vec_at(flatbuffers_generic_t, vec, i, 0)
+static inline flatbuffers_generic_t flatbuffers_generic_vec_at_as_string(flatbuffers_generic_vec_t vec, size_t i)
+__flatbuffers_offset_vec_at(flatbuffers_generic_t, vec, i, sizeof(vec[0]))
+typedef struct flatbuffers_union {
+ flatbuffers_union_type_t type;
+ flatbuffers_generic_t value;
+} flatbuffers_union_t;
+typedef struct flatbuffers_union_vec {
+ const flatbuffers_union_type_t *type;
+ const flatbuffers_uoffset_t *value;
+} flatbuffers_union_vec_t;
+typedef struct flatbuffers_mutable_union {
+ flatbuffers_union_type_t type;
+ flatbuffers_mutable_generic_t value;
+} flatbuffers_mutable_union_t;
+typedef struct flatbuffers_mutable_union_vec {
+ flatbuffers_union_type_t *type;
+ flatbuffers_uoffset_t *value;
+} flatbuffers_mutable_union_vec_t;
+static inline flatbuffers_mutable_union_t flatbuffers_mutable_union_cast(flatbuffers_union_t u__tmp)\
+{ flatbuffers_mutable_union_t mu = { u__tmp.type, (flatbuffers_mutable_generic_t)u__tmp.value };\
+ return mu; }
+static inline flatbuffers_mutable_union_vec_t flatbuffers_mutable_union_vec_cast(flatbuffers_union_vec_t uv__tmp)\
+{ flatbuffers_mutable_union_vec_t muv =\
+ { (flatbuffers_union_type_t *)uv__tmp.type, (flatbuffers_uoffset_t *)uv__tmp.value }; return muv; }
+#define __flatbuffers_union_type_field(ID, t)\
+{\
+ __flatbuffers_read_vt(ID, offset__tmp, t)\
+ return offset__tmp ? __flatbuffers_read_scalar_at_byteoffset(__flatbuffers_utype, t, offset__tmp) : 0;\
+}
+static inline flatbuffers_string_t flatbuffers_string_cast_from_union(const flatbuffers_union_t u__tmp)\
+{ return flatbuffers_string_cast_from_generic(u__tmp.value); }
+#define __flatbuffers_define_union_field(NS, ID, N, NK, T, r)\
+static inline T ## _union_type_t N ## _ ## NK ## _type_get(N ## _table_t t__tmp)\
+__## NS ## union_type_field(((ID) - 1), t__tmp)\
+static inline NS ## generic_t N ## _ ## NK ## _get(N ## _table_t t__tmp)\
+__## NS ## table_field(NS ## generic_t, ID, t__tmp, r)\
+static inline T ## _union_type_t N ## _ ## NK ## _type(N ## _table_t t__tmp)\
+__## NS ## union_type_field(((ID) - 1), t__tmp)\
+static inline NS ## generic_t N ## _ ## NK(N ## _table_t t__tmp)\
+__## NS ## table_field(NS ## generic_t, ID, t__tmp, r)\
+static inline int N ## _ ## NK ## _is_present(N ## _table_t t__tmp)\
+__## NS ## field_present(ID, t__tmp)\
+static inline T ## _union_t N ## _ ## NK ## _union(N ## _table_t t__tmp)\
+{ T ## _union_t u__tmp = { 0, 0 }; u__tmp.type = N ## _ ## NK ## _type_get(t__tmp);\
+ if (u__tmp.type == 0) return u__tmp; u__tmp.value = N ## _ ## NK ## _get(t__tmp); return u__tmp; }\
+static inline NS ## string_t N ## _ ## NK ## _as_string(N ## _table_t t__tmp)\
+{ return NS ## string_cast_from_generic(N ## _ ## NK ## _get(t__tmp)); }\
+
+#define __flatbuffers_define_union_vector_ops(NS, T)\
+static inline size_t T ## _union_vec_len(T ## _union_vec_t uv__tmp)\
+{ return NS ## vec_len(uv__tmp.type); }\
+static inline T ## _union_t T ## _union_vec_at(T ## _union_vec_t uv__tmp, size_t i__tmp)\
+{ T ## _union_t u__tmp = { 0, 0 }; size_t n__tmp = NS ## vec_len(uv__tmp.type);\
+ FLATCC_ASSERT(n__tmp > (i__tmp) && "index out of range"); u__tmp.type = uv__tmp.type[i__tmp];\
+ /* Unknown type is treated as NONE for schema evolution. */\
+ if (u__tmp.type == 0) return u__tmp;\
+ u__tmp.value = NS ## generic_vec_at(uv__tmp.value, i__tmp); return u__tmp; }\
+static inline NS ## string_t T ## _union_vec_at_as_string(T ## _union_vec_t uv__tmp, size_t i__tmp)\
+{ return (NS ## string_t) NS ## generic_vec_at_as_string(uv__tmp.value, i__tmp); }\
+
+#define __flatbuffers_define_union_vector(NS, T)\
+typedef NS ## union_vec_t T ## _union_vec_t;\
+typedef NS ## mutable_union_vec_t T ## _mutable_union_vec_t;\
+static inline T ## _mutable_union_vec_t T ## _mutable_union_vec_cast(T ## _union_vec_t u__tmp)\
+{ return NS ## mutable_union_vec_cast(u__tmp); }\
+__## NS ## define_union_vector_ops(NS, T)
+#define __flatbuffers_define_union(NS, T)\
+typedef NS ## union_t T ## _union_t;\
+typedef NS ## mutable_union_t T ## _mutable_union_t;\
+static inline T ## _mutable_union_t T ## _mutable_union_cast(T ## _union_t u__tmp)\
+{ return NS ## mutable_union_cast(u__tmp); }\
+__## NS ## define_union_vector(NS, T)
+#define __flatbuffers_define_union_vector_field(NS, ID, N, NK, T, r)\
+__## NS ## define_vector_field(ID - 1, N, NK ## _type, T ## _vec_t, r)\
+__## NS ## define_vector_field(ID, N, NK, flatbuffers_generic_vec_t, r)\
+static inline T ## _union_vec_t N ## _ ## NK ## _union(N ## _table_t t__tmp)\
+{ T ## _union_vec_t uv__tmp; uv__tmp.type = N ## _ ## NK ## _type_get(t__tmp);\
+ uv__tmp.value = N ## _ ## NK(t__tmp);\
+ FLATCC_ASSERT(NS ## vec_len(uv__tmp.type) == NS ## vec_len(uv__tmp.value)\
+ && "union vector type length mismatch"); return uv__tmp; }
+#include <string.h>
+static const size_t flatbuffers_not_found = (size_t)-1;
+static const size_t flatbuffers_end = (size_t)-1;
+#define __flatbuffers_identity(n) (n)
+#define __flatbuffers_min(a, b) ((a) < (b) ? (a) : (b))
+/* Subtraction doesn't work for unsigned types. */
+#define __flatbuffers_scalar_cmp(x, y, n) ((x) < (y) ? -1 : (x) > (y))
+static inline int __flatbuffers_string_n_cmp(flatbuffers_string_t v, const char *s, size_t n)
+{ size_t nv = flatbuffers_string_len(v); int x = strncmp(v, s, nv < n ? nv : n);
+ return x != 0 ? x : nv < n ? -1 : nv > n; }
+/* `n` arg unused, but needed by string find macro expansion. */
+static inline int __flatbuffers_string_cmp(flatbuffers_string_t v, const char *s, size_t n) { (void)n; return strcmp(v, s); }
+/* A = identity if searching scalar vectors rather than key fields. */
+/* Returns lowest matching index or not_found. */
+#define __flatbuffers_find_by_field(A, V, E, L, K, Kn, T, D)\
+{ T v__tmp; size_t a__tmp = 0, b__tmp, m__tmp; if (!(b__tmp = L(V))) { return flatbuffers_not_found; }\
+ --b__tmp;\
+ while (a__tmp < b__tmp) {\
+ m__tmp = a__tmp + ((b__tmp - a__tmp) >> 1);\
+ v__tmp = A(E(V, m__tmp));\
+ if ((D(v__tmp, (K), (Kn))) < 0) {\
+ a__tmp = m__tmp + 1;\
+ } else {\
+ b__tmp = m__tmp;\
+ }\
+ }\
+ if (a__tmp == b__tmp) {\
+ v__tmp = A(E(V, a__tmp));\
+ if (D(v__tmp, (K), (Kn)) == 0) {\
+ return a__tmp;\
+ }\
+ }\
+ return flatbuffers_not_found;\
+}
+#define __flatbuffers_find_by_scalar_field(A, V, E, L, K, T)\
+__flatbuffers_find_by_field(A, V, E, L, K, 0, T, __flatbuffers_scalar_cmp)
+#define __flatbuffers_find_by_string_field(A, V, E, L, K)\
+__flatbuffers_find_by_field(A, V, E, L, K, 0, flatbuffers_string_t, __flatbuffers_string_cmp)
+#define __flatbuffers_find_by_string_n_field(A, V, E, L, K, Kn)\
+__flatbuffers_find_by_field(A, V, E, L, K, Kn, flatbuffers_string_t, __flatbuffers_string_n_cmp)
+#define __flatbuffers_define_find_by_scalar_field(N, NK, TK)\
+static inline size_t N ## _vec_find_by_ ## NK(N ## _vec_t vec__tmp, TK key__tmp)\
+__flatbuffers_find_by_scalar_field(N ## _ ## NK, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, TK)
+#define __flatbuffers_define_scalar_find(N, T)\
+static inline size_t N ## _vec_find(N ## _vec_t vec__tmp, T key__tmp)\
+__flatbuffers_find_by_scalar_field(__flatbuffers_identity, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)
+#define __flatbuffers_define_find_by_string_field(N, NK) \
+/* Note: find only works on vectors sorted by this field. */\
+static inline size_t N ## _vec_find_by_ ## NK(N ## _vec_t vec__tmp, const char *s__tmp)\
+__flatbuffers_find_by_string_field(N ## _ ## NK, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp)\
+static inline size_t N ## _vec_find_n_by_ ## NK(N ## _vec_t vec__tmp, const char *s__tmp, size_t n__tmp)\
+__flatbuffers_find_by_string_n_field(N ## _ ## NK, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp, n__tmp)
+#define __flatbuffers_define_default_find_by_scalar_field(N, NK, TK)\
+static inline size_t N ## _vec_find(N ## _vec_t vec__tmp, TK key__tmp)\
+{ return N ## _vec_find_by_ ## NK(vec__tmp, key__tmp); }
+#define __flatbuffers_define_default_find_by_string_field(N, NK) \
+static inline size_t N ## _vec_find(N ## _vec_t vec__tmp, const char *s__tmp)\
+{ return N ## _vec_find_by_ ## NK(vec__tmp, s__tmp); }\
+static inline size_t N ## _vec_find_n(N ## _vec_t vec__tmp, const char *s__tmp, size_t n__tmp)\
+{ return N ## _vec_find_n_by_ ## NK(vec__tmp, s__tmp, n__tmp); }
+/* A = identity if searching scalar vectors rather than key fields. */
+/* Returns lowest matching index or not_found. */
+#define __flatbuffers_scan_by_field(b, e, A, V, E, L, K, Kn, T, D)\
+{ T v__tmp; size_t i__tmp;\
+ for (i__tmp = b; i__tmp < e; ++i__tmp) {\
+ v__tmp = A(E(V, i__tmp));\
+ if (D(v__tmp, (K), (Kn)) == 0) {\
+ return i__tmp;\
+ }\
+ }\
+ return flatbuffers_not_found;\
+}
+#define __flatbuffers_rscan_by_field(b, e, A, V, E, L, K, Kn, T, D)\
+{ T v__tmp; size_t i__tmp = e;\
+ while (i__tmp-- > b) {\
+ v__tmp = A(E(V, i__tmp));\
+ if (D(v__tmp, (K), (Kn)) == 0) {\
+ return i__tmp;\
+ }\
+ }\
+ return flatbuffers_not_found;\
+}
+#define __flatbuffers_scan_by_scalar_field(b, e, A, V, E, L, K, T)\
+__flatbuffers_scan_by_field(b, e, A, V, E, L, K, 0, T, __flatbuffers_scalar_cmp)
+#define __flatbuffers_scan_by_string_field(b, e, A, V, E, L, K)\
+__flatbuffers_scan_by_field(b, e, A, V, E, L, K, 0, flatbuffers_string_t, __flatbuffers_string_cmp)
+#define __flatbuffers_scan_by_string_n_field(b, e, A, V, E, L, K, Kn)\
+__flatbuffers_scan_by_field(b, e, A, V, E, L, K, Kn, flatbuffers_string_t, __flatbuffers_string_n_cmp)
+#define __flatbuffers_rscan_by_scalar_field(b, e, A, V, E, L, K, T)\
+__flatbuffers_rscan_by_field(b, e, A, V, E, L, K, 0, T, __flatbuffers_scalar_cmp)
+#define __flatbuffers_rscan_by_string_field(b, e, A, V, E, L, K)\
+__flatbuffers_rscan_by_field(b, e, A, V, E, L, K, 0, flatbuffers_string_t, __flatbuffers_string_cmp)
+#define __flatbuffers_rscan_by_string_n_field(b, e, A, V, E, L, K, Kn)\
+__flatbuffers_rscan_by_field(b, e, A, V, E, L, K, Kn, flatbuffers_string_t, __flatbuffers_string_n_cmp)
+#define __flatbuffers_define_scan_by_scalar_field(N, NK, T)\
+static inline size_t N ## _vec_scan_by_ ## NK(N ## _vec_t vec__tmp, T key__tmp)\
+__flatbuffers_scan_by_scalar_field(0, N ## _vec_len(vec__tmp), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)\
+static inline size_t N ## _vec_scan_ex_by_ ## NK(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, T key__tmp)\
+__flatbuffers_scan_by_scalar_field(begin__tmp, __flatbuffers_min(end__tmp, N ## _vec_len(vec__tmp)), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)\
+static inline size_t N ## _vec_rscan_by_ ## NK(N ## _vec_t vec__tmp, T key__tmp)\
+__flatbuffers_rscan_by_scalar_field(0, N ## _vec_len(vec__tmp), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)\
+static inline size_t N ## _vec_rscan_ex_by_ ## NK(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, T key__tmp)\
+__flatbuffers_rscan_by_scalar_field(begin__tmp, __flatbuffers_min(end__tmp, N ## _vec_len(vec__tmp)), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)
+#define __flatbuffers_define_scalar_scan(N, T)\
+static inline size_t N ## _vec_scan(N ## _vec_t vec__tmp, T key__tmp)\
+__flatbuffers_scan_by_scalar_field(0, N ## _vec_len(vec__tmp), __flatbuffers_identity, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)\
+static inline size_t N ## _vec_scan_ex(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, T key__tmp)\
+__flatbuffers_scan_by_scalar_field(begin__tmp, __flatbuffers_min(end__tmp, N ## _vec_len(vec__tmp)), __flatbuffers_identity, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)\
+static inline size_t N ## _vec_rscan(N ## _vec_t vec__tmp, T key__tmp)\
+__flatbuffers_rscan_by_scalar_field(0, N ## _vec_len(vec__tmp), __flatbuffers_identity, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)\
+static inline size_t N ## _vec_rscan_ex(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, T key__tmp)\
+__flatbuffers_rscan_by_scalar_field(begin__tmp, __flatbuffers_min(end__tmp, N ## _vec_len(vec__tmp)), __flatbuffers_identity, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)
+#define __flatbuffers_define_scan_by_string_field(N, NK) \
+static inline size_t N ## _vec_scan_by_ ## NK(N ## _vec_t vec__tmp, const char *s__tmp)\
+__flatbuffers_scan_by_string_field(0, N ## _vec_len(vec__tmp), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp)\
+static inline size_t N ## _vec_scan_n_by_ ## NK(N ## _vec_t vec__tmp, const char *s__tmp, size_t n__tmp)\
+__flatbuffers_scan_by_string_n_field(0, N ## _vec_len(vec__tmp), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp, n__tmp)\
+static inline size_t N ## _vec_scan_ex_by_ ## NK(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp)\
+__flatbuffers_scan_by_string_field(begin__tmp, __flatbuffers_min(end__tmp, N ## _vec_len(vec__tmp)), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp)\
+static inline size_t N ## _vec_scan_ex_n_by_ ## NK(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp, size_t n__tmp)\
+__flatbuffers_scan_by_string_n_field(begin__tmp, __flatbuffers_min( end__tmp, N ## _vec_len(vec__tmp)), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp, n__tmp)\
+static inline size_t N ## _vec_rscan_by_ ## NK(N ## _vec_t vec__tmp, const char *s__tmp)\
+__flatbuffers_rscan_by_string_field(0, N ## _vec_len(vec__tmp), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp)\
+static inline size_t N ## _vec_rscan_n_by_ ## NK(N ## _vec_t vec__tmp, const char *s__tmp, size_t n__tmp)\
+__flatbuffers_rscan_by_string_n_field(0, N ## _vec_len(vec__tmp), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp, n__tmp)\
+static inline size_t N ## _vec_rscan_ex_by_ ## NK(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp)\
+__flatbuffers_rscan_by_string_field(begin__tmp, __flatbuffers_min(end__tmp, N ## _vec_len(vec__tmp)), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp)\
+static inline size_t N ## _vec_rscan_ex_n_by_ ## NK(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp, size_t n__tmp)\
+__flatbuffers_rscan_by_string_n_field(begin__tmp, __flatbuffers_min( end__tmp, N ## _vec_len(vec__tmp)), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp, n__tmp)
+#define __flatbuffers_define_default_scan_by_scalar_field(N, NK, TK)\
+static inline size_t N ## _vec_scan(N ## _vec_t vec__tmp, TK key__tmp)\
+{ return N ## _vec_scan_by_ ## NK(vec__tmp, key__tmp); }\
+static inline size_t N ## _vec_scan_ex(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, TK key__tmp)\
+{ return N ## _vec_scan_ex_by_ ## NK(vec__tmp, begin__tmp, end__tmp, key__tmp); }\
+static inline size_t N ## _vec_rscan(N ## _vec_t vec__tmp, TK key__tmp)\
+{ return N ## _vec_rscan_by_ ## NK(vec__tmp, key__tmp); }\
+static inline size_t N ## _vec_rscan_ex(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, TK key__tmp)\
+{ return N ## _vec_rscan_ex_by_ ## NK(vec__tmp, begin__tmp, end__tmp, key__tmp); }
+#define __flatbuffers_define_default_scan_by_string_field(N, NK) \
+static inline size_t N ## _vec_scan(N ## _vec_t vec__tmp, const char *s__tmp)\
+{ return N ## _vec_scan_by_ ## NK(vec__tmp, s__tmp); }\
+static inline size_t N ## _vec_scan_n(N ## _vec_t vec__tmp, const char *s__tmp, size_t n__tmp)\
+{ return N ## _vec_scan_n_by_ ## NK(vec__tmp, s__tmp, n__tmp); }\
+static inline size_t N ## _vec_scan_ex(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp)\
+{ return N ## _vec_scan_ex_by_ ## NK(vec__tmp, begin__tmp, end__tmp, s__tmp); }\
+static inline size_t N ## _vec_scan_ex_n(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp, size_t n__tmp)\
+{ return N ## _vec_scan_ex_n_by_ ## NK(vec__tmp, begin__tmp, end__tmp, s__tmp, n__tmp); }\
+static inline size_t N ## _vec_rscan(N ## _vec_t vec__tmp, const char *s__tmp)\
+{ return N ## _vec_rscan_by_ ## NK(vec__tmp, s__tmp); }\
+static inline size_t N ## _vec_rscan_n(N ## _vec_t vec__tmp, const char *s__tmp, size_t n__tmp)\
+{ return N ## _vec_rscan_n_by_ ## NK(vec__tmp, s__tmp, n__tmp); }\
+static inline size_t N ## _vec_rscan_ex(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp)\
+{ return N ## _vec_rscan_ex_by_ ## NK(vec__tmp, begin__tmp, end__tmp, s__tmp); }\
+static inline size_t N ## _vec_rscan_ex_n(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp, size_t n__tmp)\
+{ return N ## _vec_rscan_ex_n_by_ ## NK(vec__tmp, begin__tmp, end__tmp, s__tmp, n__tmp); }
+#define __flatbuffers_heap_sort(N, X, A, E, L, TK, TE, D, S)\
+static inline void __ ## N ## X ## __heap_sift_down(\
+ N ## _mutable_vec_t vec__tmp, size_t start__tmp, size_t end__tmp)\
+{ size_t child__tmp, root__tmp; TK v1__tmp, v2__tmp, vroot__tmp;\
+ root__tmp = start__tmp;\
+ while ((root__tmp << 1) <= end__tmp) {\
+ child__tmp = root__tmp << 1;\
+ if (child__tmp < end__tmp) {\
+ v1__tmp = A(E(vec__tmp, child__tmp));\
+ v2__tmp = A(E(vec__tmp, child__tmp + 1));\
+ if (D(v1__tmp, v2__tmp) < 0) {\
+ child__tmp++;\
+ }\
+ }\
+ vroot__tmp = A(E(vec__tmp, root__tmp));\
+ v1__tmp = A(E(vec__tmp, child__tmp));\
+ if (D(vroot__tmp, v1__tmp) < 0) {\
+ S(vec__tmp, root__tmp, child__tmp, TE);\
+ root__tmp = child__tmp;\
+ } else {\
+ return;\
+ }\
+ }\
+}\
+static inline void __ ## N ## X ## __heap_sort(N ## _mutable_vec_t vec__tmp)\
+{ size_t start__tmp, end__tmp, size__tmp;\
+ size__tmp = L(vec__tmp); if (size__tmp == 0) return; end__tmp = size__tmp - 1; start__tmp = size__tmp >> 1;\
+ do { __ ## N ## X ## __heap_sift_down(vec__tmp, start__tmp, end__tmp); } while (start__tmp--);\
+ while (end__tmp > 0) { \
+ S(vec__tmp, 0, end__tmp, TE);\
+ __ ## N ## X ## __heap_sift_down(vec__tmp, 0, --end__tmp); } }
+#define __flatbuffers_define_sort_by_field(N, NK, TK, TE, D, S)\
+ __flatbuffers_heap_sort(N, _sort_by_ ## NK, N ## _ ## NK ## _get, N ## _vec_at, N ## _vec_len, TK, TE, D, S)\
+static inline void N ## _vec_sort_by_ ## NK(N ## _mutable_vec_t vec__tmp)\
+{ __ ## N ## _sort_by_ ## NK ## __heap_sort(vec__tmp); }
+#define __flatbuffers_define_sort(N, TK, TE, D, S)\
+__flatbuffers_heap_sort(N, , __flatbuffers_identity, N ## _vec_at, N ## _vec_len, TK, TE, D, S)\
+static inline void N ## _vec_sort(N ## _mutable_vec_t vec__tmp) { __ ## N ## __heap_sort(vec__tmp); }
+#define __flatbuffers_scalar_diff(x, y) ((x) < (y) ? -1 : (x) > (y))
+#define __flatbuffers_string_diff(x, y) __flatbuffers_string_n_cmp((x), (const char *)(y), flatbuffers_string_len(y))
+#define __flatbuffers_value_swap(vec, a, b, TE) { TE x__tmp = vec[b]; vec[b] = vec[a]; vec[a] = x__tmp; }
+#define __flatbuffers_uoffset_swap(vec, a, b, TE)\
+{ TE ta__tmp, tb__tmp, d__tmp;\
+ d__tmp = (TE)((a - b) * sizeof(vec[0]));\
+ ta__tmp = __flatbuffers_uoffset_read_from_pe(vec + b) - d__tmp;\
+ tb__tmp = __flatbuffers_uoffset_read_from_pe(vec + a) + d__tmp;\
+ __flatbuffers_uoffset_write_to_pe(vec + a, ta__tmp);\
+ __flatbuffers_uoffset_write_to_pe(vec + b, tb__tmp); }
+#define __flatbuffers_scalar_swap(vec, a, b, TE) __flatbuffers_value_swap(vec, a, b, TE)
+#define __flatbuffers_string_swap(vec, a, b, TE) __flatbuffers_uoffset_swap(vec, a, b, TE)
+#define __flatbuffers_struct_swap(vec, a, b, TE) __flatbuffers_value_swap(vec, a, b, TE)
+#define __flatbuffers_table_swap(vec, a, b, TE) __flatbuffers_uoffset_swap(vec, a, b, TE)
+#define __flatbuffers_define_struct_sort_by_scalar_field(N, NK, TK, TE)\
+ __flatbuffers_define_sort_by_field(N, NK, TK, TE, __flatbuffers_scalar_diff, __flatbuffers_struct_swap)
+#define __flatbuffers_define_table_sort_by_scalar_field(N, NK, TK)\
+ __flatbuffers_define_sort_by_field(N, NK, TK, flatbuffers_uoffset_t, __flatbuffers_scalar_diff, __flatbuffers_table_swap)
+#define __flatbuffers_define_table_sort_by_string_field(N, NK)\
+ __flatbuffers_define_sort_by_field(N, NK, flatbuffers_string_t, flatbuffers_uoffset_t, __flatbuffers_string_diff, __flatbuffers_table_swap)
+#define __flatbuffers_define_scalar_sort(N, T) __flatbuffers_define_sort(N, T, T, __flatbuffers_scalar_diff, __flatbuffers_scalar_swap)
+#define __flatbuffers_define_string_sort() __flatbuffers_define_sort(flatbuffers_string, flatbuffers_string_t, flatbuffers_uoffset_t, __flatbuffers_string_diff, __flatbuffers_string_swap)
+#define __flatbuffers_sort_vector_field(N, NK, T, t)\
+{ T ## _mutable_vec_t v__tmp = (T ## _mutable_vec_t) N ## _ ## NK ## _get(t);\
+ if (v__tmp) T ## _vec_sort(v__tmp); }
+#define __flatbuffers_sort_table_field(N, NK, T, t)\
+{ T ## _sort((T ## _mutable_table_t)N ## _ ## NK ## _get(t)); }
+#define __flatbuffers_sort_union_field(N, NK, T, t)\
+{ T ## _sort(T ## _mutable_union_cast(N ## _ ## NK ## _union(t))); }
+#define __flatbuffers_sort_table_vector_field_elements(N, NK, T, t)\
+{ T ## _vec_t v__tmp = N ## _ ## NK ## _get(t); size_t i__tmp, n__tmp;\
+ n__tmp = T ## _vec_len(v__tmp); for (i__tmp = 0; i__tmp < n__tmp; ++i__tmp) {\
+ T ## _sort((T ## _mutable_table_t)T ## _vec_at(v__tmp, i__tmp)); }}
+#define __flatbuffers_sort_union_vector_field_elements(N, NK, T, t)\
+{ T ## _union_vec_t v__tmp = N ## _ ## NK ## _union(t); size_t i__tmp, n__tmp;\
+ n__tmp = T ## _union_vec_len(v__tmp); for (i__tmp = 0; i__tmp < n__tmp; ++i__tmp) {\
+ T ## _sort(T ## _mutable_union_cast(T ## _union_vec_at(v__tmp, i__tmp))); }}
+#define __flatbuffers_define_scalar_vector(N, T)\
+typedef const T *N ## _vec_t;\
+typedef T *N ## _mutable_vec_t;\
+__flatbuffers_define_scalar_vec_len(N)\
+__flatbuffers_define_scalar_vec_at(N, T)\
+__flatbuffers_define_scalar_find(N, T)\
+__flatbuffers_define_scalar_scan(N, T)\
+__flatbuffers_define_scalar_sort(N, T)
+
+#define __flatbuffers_define_integer_type(N, T, W)\
+__flatcc_define_integer_accessors(N, T, W, flatbuffers_endian)\
+__flatbuffers_define_scalar_vector(N, T)
+__flatbuffers_define_scalar_vector(flatbuffers_bool, flatbuffers_bool_t)
+__flatbuffers_define_scalar_vector(flatbuffers_char, char)
+__flatbuffers_define_scalar_vector(flatbuffers_uint8, uint8_t)
+__flatbuffers_define_scalar_vector(flatbuffers_int8, int8_t)
+__flatbuffers_define_scalar_vector(flatbuffers_uint16, uint16_t)
+__flatbuffers_define_scalar_vector(flatbuffers_int16, int16_t)
+__flatbuffers_define_scalar_vector(flatbuffers_uint32, uint32_t)
+__flatbuffers_define_scalar_vector(flatbuffers_int32, int32_t)
+__flatbuffers_define_scalar_vector(flatbuffers_uint64, uint64_t)
+__flatbuffers_define_scalar_vector(flatbuffers_int64, int64_t)
+__flatbuffers_define_scalar_vector(flatbuffers_float, float)
+__flatbuffers_define_scalar_vector(flatbuffers_double, double)
+__flatbuffers_define_scalar_vector(flatbuffers_union_type, flatbuffers_union_type_t)
+static inline size_t flatbuffers_string_vec_find(flatbuffers_string_vec_t vec, const char *s)
+__flatbuffers_find_by_string_field(__flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s)
+static inline size_t flatbuffers_string_vec_find_n(flatbuffers_string_vec_t vec, const char *s, size_t n)
+__flatbuffers_find_by_string_n_field(__flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s, n)
+static inline size_t flatbuffers_string_vec_scan(flatbuffers_string_vec_t vec, const char *s)
+__flatbuffers_scan_by_string_field(0, flatbuffers_string_vec_len(vec), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s)
+static inline size_t flatbuffers_string_vec_scan_n(flatbuffers_string_vec_t vec, const char *s, size_t n)
+__flatbuffers_scan_by_string_n_field(0, flatbuffers_string_vec_len(vec), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s, n)
+static inline size_t flatbuffers_string_vec_scan_ex(flatbuffers_string_vec_t vec, size_t begin, size_t end, const char *s)
+__flatbuffers_scan_by_string_field(begin, __flatbuffers_min(end, flatbuffers_string_vec_len(vec)), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s)
+static inline size_t flatbuffers_string_vec_scan_ex_n(flatbuffers_string_vec_t vec, size_t begin, size_t end, const char *s, size_t n)
+__flatbuffers_scan_by_string_n_field(begin, __flatbuffers_min(end, flatbuffers_string_vec_len(vec)), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s, n)
+static inline size_t flatbuffers_string_vec_rscan(flatbuffers_string_vec_t vec, const char *s)
+__flatbuffers_rscan_by_string_field(0, flatbuffers_string_vec_len(vec), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s)
+static inline size_t flatbuffers_string_vec_rscan_n(flatbuffers_string_vec_t vec, const char *s, size_t n)
+__flatbuffers_rscan_by_string_n_field(0, flatbuffers_string_vec_len(vec), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s, n)
+static inline size_t flatbuffers_string_vec_rscan_ex(flatbuffers_string_vec_t vec, size_t begin, size_t end, const char *s)
+__flatbuffers_rscan_by_string_field(begin, __flatbuffers_min(end, flatbuffers_string_vec_len(vec)), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s)
+static inline size_t flatbuffers_string_vec_rscan_ex_n(flatbuffers_string_vec_t vec, size_t begin, size_t end, const char *s, size_t n)
+__flatbuffers_rscan_by_string_n_field(begin, __flatbuffers_min(end, flatbuffers_string_vec_len(vec)), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s, n)
+__flatbuffers_define_string_sort()
+#define __flatbuffers_define_struct_scalar_fixed_array_field(N, NK, TK, T, L)\
+static inline T N ## _ ## NK ## _get(N ## _struct_t t__tmp, size_t i__tmp)\
+{ if (!t__tmp || i__tmp >= L) return 0;\
+ return __flatbuffers_read_scalar(TK, &(t__tmp->NK[i__tmp])); }\
+static inline const T *N ## _ ## NK ## _get_ptr(N ## _struct_t t__tmp)\
+{ return t__tmp ? t__tmp->NK : 0; }\
+static inline size_t N ## _ ## NK ## _get_len(void) { return L; }\
+static inline T N ## _ ## NK (N ## _struct_t t__tmp, size_t i__tmp)\
+{ return N ## _ ## NK ## _get(t__tmp, i__tmp); }
+#define __flatbuffers_define_struct_struct_fixed_array_field(N, NK, T, L)\
+static inline T N ## _ ## NK ## _get(N ## _struct_t t__tmp, size_t i__tmp)\
+{ if (!t__tmp || i__tmp >= L) return 0; return t__tmp->NK + i__tmp; }\
+static inline T N ## _ ## NK ## _get_ptr(N ## _struct_t t__tmp)\
+{ return t__tmp ? t__tmp->NK : 0; }\
+static inline size_t N ## _ ## NK ## _get_len(void) { return L; }\
+static inline T N ## _ ## NK(N ## _struct_t t__tmp, size_t i__tmp)\
+{ if (!t__tmp || i__tmp >= L) return 0; return t__tmp->NK + i__tmp; }
+#define __flatbuffers_define_struct_scalar_field(N, NK, TK, T)\
+static inline T N ## _ ## NK ## _get(N ## _struct_t t__tmp)\
+{ return t__tmp ? __flatbuffers_read_scalar(TK, &(t__tmp->NK)) : 0; }\
+static inline const T *N ## _ ## NK ## _get_ptr(N ## _struct_t t__tmp)\
+{ return t__tmp ? &(t__tmp->NK) : 0; }\
+static inline T N ## _ ## NK (N ## _struct_t t__tmp)\
+{ return t__tmp ? __flatbuffers_read_scalar(TK, &(t__tmp->NK)) : 0; }\
+__flatbuffers_define_scan_by_scalar_field(N, NK, T)
+#define __flatbuffers_define_struct_struct_field(N, NK, T)\
+static inline T N ## _ ## NK ## _get(N ## _struct_t t__tmp) { return t__tmp ? &(t__tmp->NK) : 0; }\
+static inline T N ## _ ## NK (N ## _struct_t t__tmp) { return t__tmp ? &(t__tmp->NK) : 0; }
+/* If fid is null, the function returns true without testing as buffer is not expected to have any id. */
+static inline int flatbuffers_has_identifier(const void *buffer, const char *fid)
+{ flatbuffers_thash_t id, id2 = 0; if (fid == 0) { return 1; };
+ id2 = flatbuffers_type_hash_from_string(fid);
+ id = __flatbuffers_thash_read_from_pe(((flatbuffers_uoffset_t *)buffer) + 1);
+ return id2 == 0 || id == id2; }
+static inline int flatbuffers_has_type_hash(const void *buffer, flatbuffers_thash_t thash)
+{ return thash == 0 || (__flatbuffers_thash_read_from_pe((flatbuffers_uoffset_t *)buffer + 1) == thash); }
+
+static inline flatbuffers_thash_t flatbuffers_get_type_hash(const void *buffer)
+{ return __flatbuffers_thash_read_from_pe((flatbuffers_uoffset_t *)buffer + 1); }
+
+#define flatbuffers_verify_endian() flatbuffers_has_identifier("\x00\x00\x00\x00" "1234", "1234")
+static inline void *flatbuffers_read_size_prefix(void *b, size_t *size_out)
+{ if (size_out) { *size_out = (size_t)__flatbuffers_uoffset_read_from_pe(b); }
+ return (uint8_t *)b + sizeof(flatbuffers_uoffset_t); }
+/* Null file identifier accepts anything, otherwise fid should be 4 characters. */
+#define __flatbuffers_read_root(T, K, buffer, fid)\
+ ((!buffer || !flatbuffers_has_identifier(buffer, fid)) ? 0 :\
+ ((T ## _ ## K ## t)(((uint8_t *)buffer) +\
+ __flatbuffers_uoffset_read_from_pe(buffer))))
+#define __flatbuffers_read_typed_root(T, K, buffer, thash)\
+ ((!buffer || !flatbuffers_has_type_hash(buffer, thash)) ? 0 :\
+ ((T ## _ ## K ## t)(((uint8_t *)buffer) +\
+ __flatbuffers_uoffset_read_from_pe(buffer))))
+#define __flatbuffers_nested_buffer_as_root(C, N, T, K)\
+static inline T ## _ ## K ## t C ## _ ## N ## _as_root_with_identifier(C ## _ ## table_t t__tmp, const char *fid__tmp)\
+{ const uint8_t *buffer__tmp = C ## _ ## N(t__tmp); return __flatbuffers_read_root(T, K, buffer__tmp, fid__tmp); }\
+static inline T ## _ ## K ## t C ## _ ## N ## _as_typed_root(C ## _ ## table_t t__tmp)\
+{ const uint8_t *buffer__tmp = C ## _ ## N(t__tmp); return __flatbuffers_read_root(T, K, buffer__tmp, C ## _ ## type_identifier); }\
+static inline T ## _ ## K ## t C ## _ ## N ## _as_root(C ## _ ## table_t t__tmp)\
+{ const char *fid__tmp = T ## _file_identifier;\
+ const uint8_t *buffer__tmp = C ## _ ## N(t__tmp); return __flatbuffers_read_root(T, K, buffer__tmp, fid__tmp); }
+#define __flatbuffers_buffer_as_root(N, K)\
+static inline N ## _ ## K ## t N ## _as_root_with_identifier(const void *buffer__tmp, const char *fid__tmp)\
+{ return __flatbuffers_read_root(N, K, buffer__tmp, fid__tmp); }\
+static inline N ## _ ## K ## t N ## _as_root_with_type_hash(const void *buffer__tmp, flatbuffers_thash_t thash__tmp)\
+{ return __flatbuffers_read_typed_root(N, K, buffer__tmp, thash__tmp); }\
+static inline N ## _ ## K ## t N ## _as_root(const void *buffer__tmp)\
+{ const char *fid__tmp = N ## _file_identifier;\
+ return __flatbuffers_read_root(N, K, buffer__tmp, fid__tmp); }\
+static inline N ## _ ## K ## t N ## _as_typed_root(const void *buffer__tmp)\
+{ return __flatbuffers_read_typed_root(N, K, buffer__tmp, N ## _type_hash); }
+#define __flatbuffers_struct_as_root(N) __flatbuffers_buffer_as_root(N, struct_)
+#define __flatbuffers_table_as_root(N) __flatbuffers_buffer_as_root(N, table_)
+
+#include "flatcc/flatcc_epilogue.h"
+#endif /* FLATBUFFERS_COMMON_READER_H */
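On the read side, the accessors these macros expand to look as follows; a minimal sketch using the generated MyGame_Sample names from the headers below, assuming buf holds a finished, already verified buffer.

    #include "monster_reader.h"    /* generated header added further down */

    static void read_example(const void *buf)
    {
        /* Root access comes from __flatbuffers_table_as_root instantiated in monster_reader.h. */
        MyGame_Sample_Monster_table_t mon = MyGame_Sample_Monster_as_root(buf);
        int16_t hp = MyGame_Sample_Monster_hp(mon);                   /* scalar, falls back to the schema default */
        flatbuffers_string_t name = MyGame_Sample_Monster_name(mon);  /* 0 when the field is absent */
        flatbuffers_uint8_vec_t inv = MyGame_Sample_Monster_inventory(mon);
        size_t n = flatbuffers_uint8_vec_len(inv);                    /* scalar vector ops defined above */
        (void)hp; (void)name; (void)n;
    }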
diff --git a/examples/monster.fbs b/examples/monster.fbs
new file mode 100644
index 0000000..12859d2
--- /dev/null
+++ b/examples/monster.fbs
@@ -0,0 +1,32 @@
+// Example IDL file for our monster's schema.
+
+namespace MyGame.Sample;
+
+enum Color:byte { Red = 0, Green, Blue = 2 }
+
+union Equipment { Weapon } // Optionally add more tables.
+
+struct Vec3 {
+ x:float;
+ y:float;
+ z:float;
+}
+
+table Monster {
+ pos:Vec3; // Struct.
+ mana:short = 150;
+ hp:short = 100;
+ name:string;
+ friendly:bool = false (deprecated);
+ inventory:[ubyte]; // Vector of scalars.
+ color:Color = Blue; // Enum.
+ weapons:[Weapon]; // Vector of tables.
+ equipped:Equipment; // Union.
+}
+
+table Weapon {
+ name:string;
+ damage:short;
+}
+
+root_type Monster;
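The namespace maps onto the generated C identifiers by replacing dots with underscores (Monster.hp becomes MyGame_Sample_Monster_hp), and the Equipment union gets _as_Weapon/_as_NONE wrappers. A small, illustrative fragment showing how the equipped union is attached while a Monster table is open, assuming a builder B and ignoring error returns:

    /* Build the union member first, then add type tag and value in one call. */
    MyGame_Sample_Weapon_ref_t axe = MyGame_Sample_Weapon_create(B,
        flatbuffers_string_create_str(B, "Axe"), 5);
    MyGame_Sample_Monster_start(B);
    MyGame_Sample_Monster_equipped_add(B, MyGame_Sample_Equipment_as_Weapon(axe));
    /* ... other fields ... */
    MyGame_Sample_Monster_ref_t mon = MyGame_Sample_Monster_end(B);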
diff --git a/examples/monster_builder.h b/examples/monster_builder.h
new file mode 100644
index 0000000..1ab48a1
--- /dev/null
+++ b/examples/monster_builder.h
@@ -0,0 +1,160 @@
+#ifndef MONSTER_BUILDER_H
+#define MONSTER_BUILDER_H
+
+/* Generated by flatcc 0.6.2 FlatBuffers schema compiler for C by dvide.com */
+
+#ifndef MONSTER_READER_H
+#include "monster_reader.h"
+#endif
+#ifndef FLATBUFFERS_COMMON_BUILDER_H
+#include "flatbuffers_common_builder.h"
+#endif
+#include "flatcc/flatcc_prologue.h"
+#ifndef flatbuffers_identifier
+#define flatbuffers_identifier 0
+#endif
+#ifndef flatbuffers_extension
+#define flatbuffers_extension "bin"
+#endif
+
+#define __MyGame_Sample_Color_formal_args , MyGame_Sample_Color_enum_t v0
+#define __MyGame_Sample_Color_call_args , v0
+__flatbuffers_build_scalar(flatbuffers_, MyGame_Sample_Color, MyGame_Sample_Color_enum_t)
+
+#define __MyGame_Sample_Vec3_formal_args , float v0, float v1, float v2
+#define __MyGame_Sample_Vec3_call_args , v0, v1, v2
+static inline MyGame_Sample_Vec3_t *MyGame_Sample_Vec3_assign(MyGame_Sample_Vec3_t *p, float v0, float v1, float v2)
+{ p->x = v0; p->y = v1; p->z = v2;
+ return p; }
+static inline MyGame_Sample_Vec3_t *MyGame_Sample_Vec3_copy(MyGame_Sample_Vec3_t *p, const MyGame_Sample_Vec3_t *p2)
+{ p->x = p2->x; p->y = p2->y; p->z = p2->z;
+ return p; }
+static inline MyGame_Sample_Vec3_t *MyGame_Sample_Vec3_assign_to_pe(MyGame_Sample_Vec3_t *p, float v0, float v1, float v2)
+{ flatbuffers_float_assign_to_pe(&p->x, v0); flatbuffers_float_assign_to_pe(&p->y, v1); flatbuffers_float_assign_to_pe(&p->z, v2);
+ return p; }
+static inline MyGame_Sample_Vec3_t *MyGame_Sample_Vec3_copy_to_pe(MyGame_Sample_Vec3_t *p, const MyGame_Sample_Vec3_t *p2)
+{ flatbuffers_float_copy_to_pe(&p->x, &p2->x); flatbuffers_float_copy_to_pe(&p->y, &p2->y); flatbuffers_float_copy_to_pe(&p->z, &p2->z);
+ return p; }
+static inline MyGame_Sample_Vec3_t *MyGame_Sample_Vec3_assign_from_pe(MyGame_Sample_Vec3_t *p, float v0, float v1, float v2)
+{ flatbuffers_float_assign_from_pe(&p->x, v0); flatbuffers_float_assign_from_pe(&p->y, v1); flatbuffers_float_assign_from_pe(&p->z, v2);
+ return p; }
+static inline MyGame_Sample_Vec3_t *MyGame_Sample_Vec3_copy_from_pe(MyGame_Sample_Vec3_t *p, const MyGame_Sample_Vec3_t *p2)
+{ flatbuffers_float_copy_from_pe(&p->x, &p2->x); flatbuffers_float_copy_from_pe(&p->y, &p2->y); flatbuffers_float_copy_from_pe(&p->z, &p2->z);
+ return p; }
+__flatbuffers_build_struct(flatbuffers_, MyGame_Sample_Vec3, 12, 4, MyGame_Sample_Vec3_file_identifier, MyGame_Sample_Vec3_type_identifier)
+__flatbuffers_define_fixed_array_primitives(flatbuffers_, MyGame_Sample_Vec3, MyGame_Sample_Vec3_t)
+
+typedef flatbuffers_union_ref_t MyGame_Sample_Equipment_union_ref_t;
+typedef flatbuffers_union_vec_ref_t MyGame_Sample_Equipment_union_vec_ref_t;
+static MyGame_Sample_Equipment_union_ref_t MyGame_Sample_Equipment_clone(flatbuffers_builder_t *B, MyGame_Sample_Equipment_union_t t);
+
+static const flatbuffers_voffset_t __MyGame_Sample_Monster_required[] = { 0 };
+typedef flatbuffers_ref_t MyGame_Sample_Monster_ref_t;
+static MyGame_Sample_Monster_ref_t MyGame_Sample_Monster_clone(flatbuffers_builder_t *B, MyGame_Sample_Monster_table_t t);
+__flatbuffers_build_table(flatbuffers_, MyGame_Sample_Monster, 10)
+
+static const flatbuffers_voffset_t __MyGame_Sample_Weapon_required[] = { 0 };
+typedef flatbuffers_ref_t MyGame_Sample_Weapon_ref_t;
+static MyGame_Sample_Weapon_ref_t MyGame_Sample_Weapon_clone(flatbuffers_builder_t *B, MyGame_Sample_Weapon_table_t t);
+__flatbuffers_build_table(flatbuffers_, MyGame_Sample_Weapon, 2)
+
+#define __MyGame_Sample_Monster_formal_args ,\
+ MyGame_Sample_Vec3_t *v0, int16_t v1, int16_t v2, flatbuffers_string_ref_t v3,\
+ flatbuffers_uint8_vec_ref_t v5, MyGame_Sample_Color_enum_t v6, MyGame_Sample_Weapon_vec_ref_t v7, MyGame_Sample_Equipment_union_ref_t v9
+#define __MyGame_Sample_Monster_call_args ,\
+ v0, v1, v2, v3,\
+ v5, v6, v7, v9
+static inline MyGame_Sample_Monster_ref_t MyGame_Sample_Monster_create(flatbuffers_builder_t *B __MyGame_Sample_Monster_formal_args);
+__flatbuffers_build_table_prolog(flatbuffers_, MyGame_Sample_Monster, MyGame_Sample_Monster_file_identifier, MyGame_Sample_Monster_type_identifier)
+
+#define __MyGame_Sample_Weapon_formal_args , flatbuffers_string_ref_t v0, int16_t v1
+#define __MyGame_Sample_Weapon_call_args , v0, v1
+static inline MyGame_Sample_Weapon_ref_t MyGame_Sample_Weapon_create(flatbuffers_builder_t *B __MyGame_Sample_Weapon_formal_args);
+__flatbuffers_build_table_prolog(flatbuffers_, MyGame_Sample_Weapon, MyGame_Sample_Weapon_file_identifier, MyGame_Sample_Weapon_type_identifier)
+
+static inline MyGame_Sample_Equipment_union_ref_t MyGame_Sample_Equipment_as_NONE(void)
+{ MyGame_Sample_Equipment_union_ref_t uref; uref.type = MyGame_Sample_Equipment_NONE; uref.value = 0; return uref; }
+static inline MyGame_Sample_Equipment_union_ref_t MyGame_Sample_Equipment_as_Weapon(MyGame_Sample_Weapon_ref_t ref)
+{ MyGame_Sample_Equipment_union_ref_t uref; uref.type = MyGame_Sample_Equipment_Weapon; uref.value = ref; return uref; }
+__flatbuffers_build_union_vector(flatbuffers_, MyGame_Sample_Equipment)
+
+static MyGame_Sample_Equipment_union_ref_t MyGame_Sample_Equipment_clone(flatbuffers_builder_t *B, MyGame_Sample_Equipment_union_t u)
+{
+ switch (u.type) {
+ case 1: return MyGame_Sample_Equipment_as_Weapon(MyGame_Sample_Weapon_clone(B, (MyGame_Sample_Weapon_table_t)u.value));
+ default: return MyGame_Sample_Equipment_as_NONE();
+ }
+}
+
+__flatbuffers_build_struct_field(0, flatbuffers_, MyGame_Sample_Monster_pos, MyGame_Sample_Vec3, 12, 4, MyGame_Sample_Monster)
+__flatbuffers_build_scalar_field(1, flatbuffers_, MyGame_Sample_Monster_mana, flatbuffers_int16, int16_t, 2, 2, INT16_C(150), MyGame_Sample_Monster)
+__flatbuffers_build_scalar_field(2, flatbuffers_, MyGame_Sample_Monster_hp, flatbuffers_int16, int16_t, 2, 2, INT16_C(100), MyGame_Sample_Monster)
+__flatbuffers_build_string_field(3, flatbuffers_, MyGame_Sample_Monster_name, MyGame_Sample_Monster)
+/* Skipping build of deprecated field: 'MyGame_Sample_Monster_friendly' */
+
+__flatbuffers_build_vector_field(5, flatbuffers_, MyGame_Sample_Monster_inventory, flatbuffers_uint8, uint8_t, MyGame_Sample_Monster)
+__flatbuffers_build_scalar_field(6, flatbuffers_, MyGame_Sample_Monster_color, MyGame_Sample_Color, MyGame_Sample_Color_enum_t, 1, 1, INT8_C(2), MyGame_Sample_Monster)
+__flatbuffers_build_table_vector_field(7, flatbuffers_, MyGame_Sample_Monster_weapons, MyGame_Sample_Weapon, MyGame_Sample_Monster)
+__flatbuffers_build_union_field(9, flatbuffers_, MyGame_Sample_Monster_equipped, MyGame_Sample_Equipment, MyGame_Sample_Monster)
+__flatbuffers_build_union_table_value_field(flatbuffers_, MyGame_Sample_Monster_equipped, MyGame_Sample_Equipment, Weapon, MyGame_Sample_Weapon)
+
+static inline MyGame_Sample_Monster_ref_t MyGame_Sample_Monster_create(flatbuffers_builder_t *B __MyGame_Sample_Monster_formal_args)
+{
+ if (MyGame_Sample_Monster_start(B)
+ || MyGame_Sample_Monster_pos_add(B, v0)
+ || MyGame_Sample_Monster_name_add(B, v3)
+ || MyGame_Sample_Monster_inventory_add(B, v5)
+ || MyGame_Sample_Monster_weapons_add(B, v7)
+ || MyGame_Sample_Monster_equipped_add_value(B, v9)
+ || MyGame_Sample_Monster_mana_add(B, v1)
+ || MyGame_Sample_Monster_hp_add(B, v2)
+ || MyGame_Sample_Monster_color_add(B, v6)
+ || MyGame_Sample_Monster_equipped_add_type(B, v9.type)) {
+ return 0;
+ }
+ return MyGame_Sample_Monster_end(B);
+}
+
+static MyGame_Sample_Monster_ref_t MyGame_Sample_Monster_clone(flatbuffers_builder_t *B, MyGame_Sample_Monster_table_t t)
+{
+ __flatbuffers_memoize_begin(B, t);
+ if (MyGame_Sample_Monster_start(B)
+ || MyGame_Sample_Monster_pos_pick(B, t)
+ || MyGame_Sample_Monster_name_pick(B, t)
+ || MyGame_Sample_Monster_inventory_pick(B, t)
+ || MyGame_Sample_Monster_weapons_pick(B, t)
+ || MyGame_Sample_Monster_equipped_pick(B, t)
+ || MyGame_Sample_Monster_mana_pick(B, t)
+ || MyGame_Sample_Monster_hp_pick(B, t)
+ || MyGame_Sample_Monster_color_pick(B, t)) {
+ return 0;
+ }
+ __flatbuffers_memoize_end(B, t, MyGame_Sample_Monster_end(B));
+}
+
+__flatbuffers_build_string_field(0, flatbuffers_, MyGame_Sample_Weapon_name, MyGame_Sample_Weapon)
+__flatbuffers_build_scalar_field(1, flatbuffers_, MyGame_Sample_Weapon_damage, flatbuffers_int16, int16_t, 2, 2, INT16_C(0), MyGame_Sample_Weapon)
+
+static inline MyGame_Sample_Weapon_ref_t MyGame_Sample_Weapon_create(flatbuffers_builder_t *B __MyGame_Sample_Weapon_formal_args)
+{
+ if (MyGame_Sample_Weapon_start(B)
+ || MyGame_Sample_Weapon_name_add(B, v0)
+ || MyGame_Sample_Weapon_damage_add(B, v1)) {
+ return 0;
+ }
+ return MyGame_Sample_Weapon_end(B);
+}
+
+static MyGame_Sample_Weapon_ref_t MyGame_Sample_Weapon_clone(flatbuffers_builder_t *B, MyGame_Sample_Weapon_table_t t)
+{
+ __flatbuffers_memoize_begin(B, t);
+ if (MyGame_Sample_Weapon_start(B)
+ || MyGame_Sample_Weapon_name_pick(B, t)
+ || MyGame_Sample_Weapon_damage_pick(B, t)) {
+ return 0;
+ }
+ __flatbuffers_memoize_end(B, t, MyGame_Sample_Weapon_end(B));
+}
+
+#include "flatcc/flatcc_epilogue.h"
+#endif /* MONSTER_BUILDER_H */
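
Not part of this commit, but a rough sketch of how the builder API generated above is meant to be used, following the upstream flatcc monster sample. The helper name build_monster is hypothetical, and returning flatcc_builder_get_direct_buffer() instead of a finalized copy is an illustrative choice:

    #include "flatcc/flatcc_builder.h"
    #include "monster_builder.h"

    /* Hypothetical helper: serialize one Monster and return a pointer into the
     * builder's internal buffer (valid until flatcc_builder_clear(B)). */
    static void *build_monster(flatcc_builder_t *B, size_t *size_out)
    {
        uint8_t inventory[] = { 0, 1, 2, 3 };

        flatcc_builder_init(B);

        /* Child objects first: a Weapon table and a vector of Weapon references. */
        MyGame_Sample_Weapon_ref_t sword =
            MyGame_Sample_Weapon_create(B, flatbuffers_string_create_str(B, "Sword"), 3);
        MyGame_Sample_Weapon_vec_start(B);
        MyGame_Sample_Weapon_vec_push(B, sword);
        MyGame_Sample_Weapon_vec_ref_t weapons = MyGame_Sample_Weapon_vec_end(B);

        /* Root table; return codes (0 on success) are not checked in this sketch. */
        MyGame_Sample_Monster_start_as_root(B);
        MyGame_Sample_Monster_pos_create(B, 1.0f, 2.0f, 3.0f);
        MyGame_Sample_Monster_hp_add(B, 300);
        MyGame_Sample_Monster_name_create_str(B, "Orc");
        MyGame_Sample_Monster_inventory_create(B, inventory, sizeof(inventory));
        MyGame_Sample_Monster_weapons_add(B, weapons);
        MyGame_Sample_Monster_equipped_add(B, MyGame_Sample_Equipment_as_Weapon(sword));
        MyGame_Sample_Monster_end_as_root(B);

        return flatcc_builder_get_direct_buffer(B, size_out);
    }

Fields may be added in any order between start_as_root and end_as_root; anything left out falls back to the schema defaults visible above (e.g. mana = 150, color = Blue).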
diff --git a/examples/monster_reader.h b/examples/monster_reader.h
new file mode 100644
index 0000000..04929cb
--- /dev/null
+++ b/examples/monster_reader.h
@@ -0,0 +1,176 @@
+#ifndef MONSTER_READER_H
+#define MONSTER_READER_H
+
+/* Generated by flatcc 0.6.2 FlatBuffers schema compiler for C by dvide.com */
+
+#ifndef FLATBUFFERS_COMMON_READER_H
+#include "flatbuffers_common_reader.h"
+#endif
+#include "flatcc/flatcc_flatbuffers.h"
+#ifndef __alignas_is_defined
+#include <stdalign.h>
+#endif
+#include "flatcc/flatcc_prologue.h"
+#ifndef flatbuffers_identifier
+#define flatbuffers_identifier 0
+#endif
+#ifndef flatbuffers_extension
+#define flatbuffers_extension "bin"
+#endif
+
+typedef struct MyGame_Sample_Vec3 MyGame_Sample_Vec3_t;
+typedef const MyGame_Sample_Vec3_t *MyGame_Sample_Vec3_struct_t;
+typedef MyGame_Sample_Vec3_t *MyGame_Sample_Vec3_mutable_struct_t;
+typedef const MyGame_Sample_Vec3_t *MyGame_Sample_Vec3_vec_t;
+typedef MyGame_Sample_Vec3_t *MyGame_Sample_Vec3_mutable_vec_t;
+
+typedef const struct MyGame_Sample_Monster_table *MyGame_Sample_Monster_table_t;
+typedef struct MyGame_Sample_Monster_table *MyGame_Sample_Monster_mutable_table_t;
+typedef const flatbuffers_uoffset_t *MyGame_Sample_Monster_vec_t;
+typedef flatbuffers_uoffset_t *MyGame_Sample_Monster_mutable_vec_t;
+typedef const struct MyGame_Sample_Weapon_table *MyGame_Sample_Weapon_table_t;
+typedef struct MyGame_Sample_Weapon_table *MyGame_Sample_Weapon_mutable_table_t;
+typedef const flatbuffers_uoffset_t *MyGame_Sample_Weapon_vec_t;
+typedef flatbuffers_uoffset_t *MyGame_Sample_Weapon_mutable_vec_t;
+#ifndef MyGame_Sample_Vec3_file_identifier
+#define MyGame_Sample_Vec3_file_identifier 0
+#endif
+/* deprecated, use MyGame_Sample_Vec3_file_identifier */
+#ifndef MyGame_Sample_Vec3_identifier
+#define MyGame_Sample_Vec3_identifier 0
+#endif
+#define MyGame_Sample_Vec3_type_hash ((flatbuffers_thash_t)0x950fd0dc)
+#define MyGame_Sample_Vec3_type_identifier "\xdc\xd0\x0f\x95"
+#ifndef MyGame_Sample_Vec3_file_extension
+#define MyGame_Sample_Vec3_file_extension "bin"
+#endif
+#ifndef MyGame_Sample_Monster_file_identifier
+#define MyGame_Sample_Monster_file_identifier 0
+#endif
+/* deprecated, use MyGame_Sample_Monster_file_identifier */
+#ifndef MyGame_Sample_Monster_identifier
+#define MyGame_Sample_Monster_identifier 0
+#endif
+#define MyGame_Sample_Monster_type_hash ((flatbuffers_thash_t)0xd5be61b)
+#define MyGame_Sample_Monster_type_identifier "\x1b\xe6\x5b\x0d"
+#ifndef MyGame_Sample_Monster_file_extension
+#define MyGame_Sample_Monster_file_extension "bin"
+#endif
+#ifndef MyGame_Sample_Weapon_file_identifier
+#define MyGame_Sample_Weapon_file_identifier 0
+#endif
+/* deprecated, use MyGame_Sample_Weapon_file_identifier */
+#ifndef MyGame_Sample_Weapon_identifier
+#define MyGame_Sample_Weapon_identifier 0
+#endif
+#define MyGame_Sample_Weapon_type_hash ((flatbuffers_thash_t)0x22292e53)
+#define MyGame_Sample_Weapon_type_identifier "\x53\x2e\x29\x22"
+#ifndef MyGame_Sample_Weapon_file_extension
+#define MyGame_Sample_Weapon_file_extension "bin"
+#endif
+
+typedef int8_t MyGame_Sample_Color_enum_t;
+__flatbuffers_define_integer_type(MyGame_Sample_Color, MyGame_Sample_Color_enum_t, 8)
+#define MyGame_Sample_Color_Red ((MyGame_Sample_Color_enum_t)INT8_C(0))
+#define MyGame_Sample_Color_Green ((MyGame_Sample_Color_enum_t)INT8_C(1))
+#define MyGame_Sample_Color_Blue ((MyGame_Sample_Color_enum_t)INT8_C(2))
+
+static inline const char *MyGame_Sample_Color_name(MyGame_Sample_Color_enum_t value)
+{
+ switch (value) {
+ case MyGame_Sample_Color_Red: return "Red";
+ case MyGame_Sample_Color_Green: return "Green";
+ case MyGame_Sample_Color_Blue: return "Blue";
+ default: return "";
+ }
+}
+
+static inline int MyGame_Sample_Color_is_known_value(MyGame_Sample_Color_enum_t value)
+{
+ switch (value) {
+ case MyGame_Sample_Color_Red: return 1;
+ case MyGame_Sample_Color_Green: return 1;
+ case MyGame_Sample_Color_Blue: return 1;
+ default: return 0;
+ }
+}
+
+
+struct MyGame_Sample_Vec3 {
+ alignas(4) float x;
+ alignas(4) float y;
+ alignas(4) float z;
+};
+static_assert(sizeof(MyGame_Sample_Vec3_t) == 12, "struct size mismatch");
+
+static inline const MyGame_Sample_Vec3_t *MyGame_Sample_Vec3__const_ptr_add(const MyGame_Sample_Vec3_t *p, size_t i) { return p + i; }
+static inline MyGame_Sample_Vec3_t *MyGame_Sample_Vec3__ptr_add(MyGame_Sample_Vec3_t *p, size_t i) { return p + i; }
+static inline MyGame_Sample_Vec3_struct_t MyGame_Sample_Vec3_vec_at(MyGame_Sample_Vec3_vec_t vec, size_t i)
+__flatbuffers_struct_vec_at(vec, i)
+static inline size_t MyGame_Sample_Vec3__size(void) { return 12; }
+static inline size_t MyGame_Sample_Vec3_vec_len(MyGame_Sample_Vec3_vec_t vec)
+__flatbuffers_vec_len(vec)
+__flatbuffers_struct_as_root(MyGame_Sample_Vec3)
+
+__flatbuffers_define_struct_scalar_field(MyGame_Sample_Vec3, x, flatbuffers_float, float)
+__flatbuffers_define_struct_scalar_field(MyGame_Sample_Vec3, y, flatbuffers_float, float)
+__flatbuffers_define_struct_scalar_field(MyGame_Sample_Vec3, z, flatbuffers_float, float)
+
+typedef uint8_t MyGame_Sample_Equipment_union_type_t;
+__flatbuffers_define_integer_type(MyGame_Sample_Equipment, MyGame_Sample_Equipment_union_type_t, 8)
+__flatbuffers_define_union(flatbuffers_, MyGame_Sample_Equipment)
+#define MyGame_Sample_Equipment_NONE ((MyGame_Sample_Equipment_union_type_t)UINT8_C(0))
+#define MyGame_Sample_Equipment_Weapon ((MyGame_Sample_Equipment_union_type_t)UINT8_C(1))
+
+static inline const char *MyGame_Sample_Equipment_type_name(MyGame_Sample_Equipment_union_type_t type)
+{
+ switch (type) {
+ case MyGame_Sample_Equipment_NONE: return "NONE";
+ case MyGame_Sample_Equipment_Weapon: return "Weapon";
+ default: return "";
+ }
+}
+
+static inline int MyGame_Sample_Equipment_is_known_type(MyGame_Sample_Equipment_union_type_t type)
+{
+ switch (type) {
+ case MyGame_Sample_Equipment_NONE: return 1;
+ case MyGame_Sample_Equipment_Weapon: return 1;
+ default: return 0;
+ }
+}
+
+
+struct MyGame_Sample_Monster_table { uint8_t unused__; };
+
+static inline size_t MyGame_Sample_Monster_vec_len(MyGame_Sample_Monster_vec_t vec)
+__flatbuffers_vec_len(vec)
+static inline MyGame_Sample_Monster_table_t MyGame_Sample_Monster_vec_at(MyGame_Sample_Monster_vec_t vec, size_t i)
+__flatbuffers_offset_vec_at(MyGame_Sample_Monster_table_t, vec, i, 0)
+__flatbuffers_table_as_root(MyGame_Sample_Monster)
+
+__flatbuffers_define_struct_field(0, MyGame_Sample_Monster, pos, MyGame_Sample_Vec3_struct_t, 0)
+__flatbuffers_define_scalar_field(1, MyGame_Sample_Monster, mana, flatbuffers_int16, int16_t, INT16_C(150))
+__flatbuffers_define_scalar_field(2, MyGame_Sample_Monster, hp, flatbuffers_int16, int16_t, INT16_C(100))
+__flatbuffers_define_string_field(3, MyGame_Sample_Monster, name, 0)
+/* Skipping deprecated field: 'MyGame_Sample_Monster_friendly' */
+
+__flatbuffers_define_vector_field(5, MyGame_Sample_Monster, inventory, flatbuffers_uint8_vec_t, 0)
+__flatbuffers_define_scalar_field(6, MyGame_Sample_Monster, color, MyGame_Sample_Color, MyGame_Sample_Color_enum_t, INT8_C(2))
+__flatbuffers_define_vector_field(7, MyGame_Sample_Monster, weapons, MyGame_Sample_Weapon_vec_t, 0)
+__flatbuffers_define_union_field(flatbuffers_, 9, MyGame_Sample_Monster, equipped, MyGame_Sample_Equipment, 0)
+
+struct MyGame_Sample_Weapon_table { uint8_t unused__; };
+
+static inline size_t MyGame_Sample_Weapon_vec_len(MyGame_Sample_Weapon_vec_t vec)
+__flatbuffers_vec_len(vec)
+static inline MyGame_Sample_Weapon_table_t MyGame_Sample_Weapon_vec_at(MyGame_Sample_Weapon_vec_t vec, size_t i)
+__flatbuffers_offset_vec_at(MyGame_Sample_Weapon_table_t, vec, i, 0)
+__flatbuffers_table_as_root(MyGame_Sample_Weapon)
+
+__flatbuffers_define_string_field(0, MyGame_Sample_Weapon, name, 0)
+__flatbuffers_define_scalar_field(1, MyGame_Sample_Weapon, damage, flatbuffers_int16, int16_t, INT16_C(0))
+
+
+#include "flatcc/flatcc_epilogue.h"
+#endif /* MONSTER_READER_H */
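
On the read side, the accessors generated in monster_reader.h are used roughly as follows. This is an illustrative sketch: inspect_monster is a hypothetical helper and buf is assumed to hold a finished, already-verified buffer such as the one produced by the builder sketch earlier:

    #include "monster_reader.h"

    /* Hypothetical helper: walk a finished (and already verified) buffer. */
    static void inspect_monster(const void *buf)
    {
        MyGame_Sample_Monster_table_t monster = MyGame_Sample_Monster_as_root(buf);

        int16_t hp = MyGame_Sample_Monster_hp(monster);                  /* 100 if absent */
        flatbuffers_string_t name = MyGame_Sample_Monster_name(monster); /* NUL-terminated */

        MyGame_Sample_Weapon_vec_t weapons = MyGame_Sample_Monster_weapons(monster);
        for (size_t i = 0; i < MyGame_Sample_Weapon_vec_len(weapons); ++i) {
            MyGame_Sample_Weapon_table_t w = MyGame_Sample_Weapon_vec_at(weapons, i);
            (void)MyGame_Sample_Weapon_damage(w);
        }

        /* Union access: check the type tag before interpreting the value. */
        if (MyGame_Sample_Monster_equipped_type(monster) == MyGame_Sample_Equipment_Weapon) {
            MyGame_Sample_Weapon_table_t w =
                (MyGame_Sample_Weapon_table_t)MyGame_Sample_Monster_equipped(monster);
            (void)MyGame_Sample_Weapon_name(w);
        }

        (void)hp;
        (void)name;
    }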
diff --git a/examples/monster_verifier.h b/examples/monster_verifier.h
new file mode 100644
index 0000000..f1b49a4
--- /dev/null
+++ b/examples/monster_verifier.h
@@ -0,0 +1,106 @@
+#ifndef MONSTER_VERIFIER_H
+#define MONSTER_VERIFIER_H
+
+/* Generated by flatcc 0.6.2 FlatBuffers schema compiler for C by dvide.com */
+
+#ifndef MONSTER_READER_H
+#include "monster_reader.h"
+#endif
+#include "flatcc/flatcc_verifier.h"
+#include "flatcc/flatcc_prologue.h"
+
+static int MyGame_Sample_Monster_verify_table(flatcc_table_verifier_descriptor_t *td);
+static int MyGame_Sample_Weapon_verify_table(flatcc_table_verifier_descriptor_t *td);
+
+static int MyGame_Sample_Equipment_union_verifier(flatcc_union_verifier_descriptor_t *ud)
+{
+ switch (ud->type) {
+ case 1: return flatcc_verify_union_table(ud, MyGame_Sample_Weapon_verify_table); /* Weapon */
+ default: return flatcc_verify_ok;
+ }
+}
+
+static inline int MyGame_Sample_Vec3_verify_as_root(const void *buf, size_t bufsiz)
+{
+ return flatcc_verify_struct_as_root(buf, bufsiz, MyGame_Sample_Vec3_identifier, 12, 4);
+}
+
+static inline int MyGame_Sample_Vec3_verify_as_typed_root(const void *buf, size_t bufsiz)
+{
+ return flatcc_verify_struct_as_typed_root(buf, bufsiz, MyGame_Sample_Vec3_type_hash, 12, 4);
+}
+
+static inline int MyGame_Sample_Vec3_verify_as_root_with_type_hash(const void *buf, size_t bufsiz, flatbuffers_thash_t thash)
+{
+ return flatcc_verify_struct_as_typed_root(buf, bufsiz, thash, 12, 4);
+}
+
+static inline int MyGame_Sample_Vec3_verify_as_root_with_identifier(const void *buf, size_t bufsiz, const char *fid)
+{
+ return flatcc_verify_struct_as_root(buf, bufsiz, fid, 12, 4);
+}
+
+static int MyGame_Sample_Monster_verify_table(flatcc_table_verifier_descriptor_t *td)
+{
+ int ret;
+ if ((ret = flatcc_verify_field(td, 0, 12, 4) /* pos */)) return ret;
+ if ((ret = flatcc_verify_field(td, 1, 2, 2) /* mana */)) return ret;
+ if ((ret = flatcc_verify_field(td, 2, 2, 2) /* hp */)) return ret;
+ if ((ret = flatcc_verify_string_field(td, 3, 0) /* name */)) return ret;
+ if ((ret = flatcc_verify_vector_field(td, 5, 0, 1, 1, INT64_C(4294967295)) /* inventory */)) return ret;
+ if ((ret = flatcc_verify_field(td, 6, 1, 1) /* color */)) return ret;
+ if ((ret = flatcc_verify_table_vector_field(td, 7, 0, &MyGame_Sample_Weapon_verify_table) /* weapons */)) return ret;
+ if ((ret = flatcc_verify_union_field(td, 9, 0, &MyGame_Sample_Equipment_union_verifier) /* equipped */)) return ret;
+ return flatcc_verify_ok;
+}
+
+static inline int MyGame_Sample_Monster_verify_as_root(const void *buf, size_t bufsiz)
+{
+ return flatcc_verify_table_as_root(buf, bufsiz, MyGame_Sample_Monster_identifier, &MyGame_Sample_Monster_verify_table);
+}
+
+static inline int MyGame_Sample_Monster_verify_as_typed_root(const void *buf, size_t bufsiz)
+{
+ return flatcc_verify_table_as_root(buf, bufsiz, MyGame_Sample_Monster_type_identifier, &MyGame_Sample_Monster_verify_table);
+}
+
+static inline int MyGame_Sample_Monster_verify_as_root_with_identifier(const void *buf, size_t bufsiz, const char *fid)
+{
+ return flatcc_verify_table_as_root(buf, bufsiz, fid, &MyGame_Sample_Monster_verify_table);
+}
+
+static inline int MyGame_Sample_Monster_verify_as_root_with_type_hash(const void *buf, size_t bufsiz, flatbuffers_thash_t thash)
+{
+ return flatcc_verify_table_as_typed_root(buf, bufsiz, thash, &MyGame_Sample_Monster_verify_table);
+}
+
+static int MyGame_Sample_Weapon_verify_table(flatcc_table_verifier_descriptor_t *td)
+{
+ int ret;
+ if ((ret = flatcc_verify_string_field(td, 0, 0) /* name */)) return ret;
+ if ((ret = flatcc_verify_field(td, 1, 2, 2) /* damage */)) return ret;
+ return flatcc_verify_ok;
+}
+
+static inline int MyGame_Sample_Weapon_verify_as_root(const void *buf, size_t bufsiz)
+{
+ return flatcc_verify_table_as_root(buf, bufsiz, MyGame_Sample_Weapon_identifier, &MyGame_Sample_Weapon_verify_table);
+}
+
+static inline int MyGame_Sample_Weapon_verify_as_typed_root(const void *buf, size_t bufsiz)
+{
+ return flatcc_verify_table_as_root(buf, bufsiz, MyGame_Sample_Weapon_type_identifier, &MyGame_Sample_Weapon_verify_table);
+}
+
+static inline int MyGame_Sample_Weapon_verify_as_root_with_identifier(const void *buf, size_t bufsiz, const char *fid)
+{
+ return flatcc_verify_table_as_root(buf, bufsiz, fid, &MyGame_Sample_Weapon_verify_table);
+}
+
+static inline int MyGame_Sample_Weapon_verify_as_root_with_type_hash(const void *buf, size_t bufsiz, flatbuffers_thash_t thash)
+{
+ return flatcc_verify_table_as_typed_root(buf, bufsiz, thash, &MyGame_Sample_Weapon_verify_table);
+}
+
+#include "flatcc/flatcc_epilogue.h"
+#endif /* MONSTER_VERIFIER_H */
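
The verifier is meant to run on untrusted input before any reader accessor touches it. A minimal sketch using only the functions defined above (monster_buffer_is_valid is a hypothetical helper; flatcc_verify_ok evaluates to 0 in the flatcc runtime):

    #include "monster_verifier.h"

    /* Hypothetical helper: returns nonzero when buf/bufsiz holds a structurally
     * valid Monster buffer; only then should the reader accessors be used. */
    static int monster_buffer_is_valid(const void *buf, size_t bufsiz)
    {
        return MyGame_Sample_Monster_verify_as_root(buf, bufsiz) == flatcc_verify_ok;
    }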
diff --git a/flatcc/include/flatcc/portable/paligned_alloc.h b/flatcc/include/flatcc/portable/paligned_alloc.h
index 70b00b9..8825de1 100644
--- a/flatcc/include/flatcc/portable/paligned_alloc.h
+++ b/flatcc/include/flatcc/portable/paligned_alloc.h
@@ -112,7 +112,7 @@ extern "C" {
#ifdef PORTABLE_DEBUG_ALIGNED_ALLOC
#error "DEBUG: C11_ALIGNED_ALLOC configured"
#endif
-#elif defined(_MSC_VER) || defined(__MINGW32__)
+#elif 0
#ifdef PORTABLE_DEBUG_ALIGNED_ALLOC
#error "DEBUG: Windows _aligned_malloc configured"
#endif
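
The hunk above compiles out the _aligned_malloc/_aligned_free branch so paligned_alloc.h falls through to its portable implementation, presumably because the user-mode CRT aligned-allocation routines are not usable from a kernel driver. For illustration only (this is not the library's actual code), the general malloc-based technique such a fallback relies on looks like the sketch below; alignment must be a power of two:

    #include <stdint.h>
    #include <stdlib.h>

    /* Illustrative sketch only: over-allocate, align the result, and stash the raw
     * pointer just in front of the aligned block so it can be recovered on free. */
    static void *aligned_alloc_sketch(size_t alignment, size_t size)
    {
        void *raw = malloc(size + alignment - 1 + sizeof(void *));
        if (raw == NULL)
            return NULL;
        uintptr_t aligned = ((uintptr_t)raw + sizeof(void *) + alignment - 1)
                            & ~(uintptr_t)(alignment - 1);
        ((void **)aligned)[-1] = raw; /* remember the original allocation */
        return (void *)aligned;
    }

    static void aligned_free_sketch(void *p)
    {
        if (p != NULL)
            free(((void **)p)[-1]);
    }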
diff --git a/flatcc/update.sh b/flatcc/update.sh
new file mode 100755
index 0000000..fb66a0b
--- /dev/null
+++ b/flatcc/update.sh
@@ -0,0 +1,6 @@
+#!/usr/bin/env sh
+
+set -e
+set -x
+
+git subtree pull --squash --prefix=flatcc https://github.com/dvidelabs/flatcc.git master