From 522f035cb1f92fec92459c6aeb3e4ca24d73a2b8 Mon Sep 17 00:00:00 2001
From: David Nikdel
Date: Thu, 9 May 2024 22:23:16 -0400
Subject: [PATCH] GLTF export improvements

- GLBs produced by Godot don't pass validation when there's no data in the
  buffer segment: the segment is dropped, but the size of its chunk header is
  still (incorrectly) counted in the reported total length.
- Remove the empty "extensions" JSON object that was appended to every node
  (when it is still empty at serialization time). This cuts unnecessary bloat
  and is consistent with the rest of the file's approach of not emitting keys
  that are equal to their default value.
- Allow the case where root_nodes is empty. This is permitted by the glTF
  spec, and it can happen fairly naturally when using the
  ROOT_NODE_MODE_MULTI_ROOT root node mode on a scene with only a root node
  (which is valid in Godot).
- Don't create an initial buffer until we're ready to write data into it
  (buffers with byteLength == 0 don't pass validation).
---
 modules/gltf/gltf_document.cpp | 73 ++++++++++++++++++++++++++--------
 1 file changed, 57 insertions(+), 16 deletions(-)

diff --git a/modules/gltf/gltf_document.cpp b/modules/gltf/gltf_document.cpp
index 575702bc544..b92176a63a6 100644
--- a/modules/gltf/gltf_document.cpp
+++ b/modules/gltf/gltf_document.cpp
@@ -105,10 +105,6 @@ static Ref<ImporterMesh> _mesh_to_importer_mesh(Ref<Mesh> p_mesh) {
 }
 
 Error GLTFDocument::_serialize(Ref<GLTFState> p_state) {
-	if (!p_state->buffers.size()) {
-		p_state->buffers.push_back(Vector<uint8_t>());
-	}
-
 	for (Ref<GLTFDocumentExtension> ext : document_extensions) {
 		ERR_CONTINUE(ext.is_null());
 		Error err = ext->export_preserialize(p_state);
@@ -243,7 +239,6 @@ Error GLTFDocument::_serialize_gltf_extensions(Ref<GLTFState> p_state) const {
 }
 
 Error GLTFDocument::_serialize_scenes(Ref<GLTFState> p_state) {
-	ERR_FAIL_COND_V_MSG(p_state->root_nodes.is_empty(), ERR_INVALID_DATA, "GLTF export: The scene must have at least one root node.");
 	// Godot only supports one scene per glTF file.
 	Array scenes;
 	Dictionary scene_dict;
@@ -251,7 +246,9 @@ Error GLTFDocument::_serialize_scenes(Ref<GLTFState> p_state) {
 	p_state->json["scenes"] = scenes;
 	p_state->json["scene"] = 0;
 	// Add nodes to the scene dict.
-	scene_dict["nodes"] = p_state->root_nodes;
+	if (!p_state->root_nodes.is_empty()) {
+		scene_dict["nodes"] = p_state->root_nodes;
+	}
 	if (!p_state->scene_name.is_empty()) {
 		scene_dict["name"] = p_state->scene_name;
 	}
@@ -458,9 +455,15 @@ Error GLTFDocument::_serialize_nodes(Ref<GLTFState> p_state) {
 			ERR_CONTINUE(err != OK);
 		}
 
+		if (extensions.is_empty()) {
+			node.erase("extensions");
+		}
+
 		nodes.push_back(node);
 	}
-	p_state->json["nodes"] = nodes;
+	if (!nodes.is_empty()) {
+		p_state->json["nodes"] = nodes;
+	}
 	return OK;
 }
 
@@ -691,11 +694,11 @@ static Vector<uint8_t> _parse_base64_uri(const String &p_uri) {
 Error GLTFDocument::_encode_buffer_glb(Ref<GLTFState> p_state, const String &p_path) {
 	print_verbose("glTF: Total buffers: " + itos(p_state->buffers.size()));
 
-	if (!p_state->buffers.size()) {
+	if (p_state->buffers.is_empty()) {
 		return OK;
 	}
 	Array buffers;
-	if (p_state->buffers.size()) {
+	if (!p_state->buffers.is_empty()) {
 		Vector<uint8_t> buffer_data = p_state->buffers[0];
 		Dictionary gltf_buffer;
 
@@ -730,7 +733,7 @@ Error GLTFDocument::_encode_buffer_glb(Ref<GLTFState> p_state, const String &p_p
 Error GLTFDocument::_encode_buffer_bins(Ref<GLTFState> p_state, const String &p_path) {
 	print_verbose("glTF: Total buffers: " + itos(p_state->buffers.size()));
 
-	if (!p_state->buffers.size()) {
+	if (p_state->buffers.is_empty()) {
 		return OK;
 	}
 	Array buffers;
@@ -1543,6 +1546,9 @@ GLTFAccessorIndex GLTFDocument::_encode_accessor_as_ints(Ref<GLTFState> p_state,
 	Ref<GLTFAccessor> accessor;
 	accessor.instantiate();
 	GLTFBufferIndex buffer_view_i;
+	if (p_state->buffers.is_empty()) {
+		p_state->buffers.push_back(Vector<uint8_t>());
+	}
 	int64_t size = p_state->buffers[0].size();
 	const GLTFType type = GLTFType::TYPE_SCALAR;
 	int component_type;
@@ -1654,6 +1660,9 @@ GLTFAccessorIndex GLTFDocument::_encode_accessor_as_vec2(Ref<GLTFState> p_state,
 	Ref<GLTFAccessor> accessor;
 	accessor.instantiate();
 	GLTFBufferIndex buffer_view_i;
+	if (p_state->buffers.is_empty()) {
+		p_state->buffers.push_back(Vector<uint8_t>());
+	}
 	int64_t size = p_state->buffers[0].size();
 	const GLTFType type = GLTFType::TYPE_VEC2;
 	const int component_type = GLTFDocument::COMPONENT_TYPE_FLOAT;
@@ -1704,6 +1713,9 @@ GLTFAccessorIndex GLTFDocument::_encode_accessor_as_color(Ref<GLTFState> p_state
 	Ref<GLTFAccessor> accessor;
 	accessor.instantiate();
 	GLTFBufferIndex buffer_view_i;
+	if (p_state->buffers.is_empty()) {
+		p_state->buffers.push_back(Vector<uint8_t>());
+	}
 	int64_t size = p_state->buffers[0].size();
 	const GLTFType type = GLTFType::TYPE_VEC4;
 	const int component_type = GLTFDocument::COMPONENT_TYPE_FLOAT;
@@ -1768,6 +1780,9 @@ GLTFAccessorIndex GLTFDocument::_encode_accessor_as_weights(Ref<GLTFState> p_sta
 	Ref<GLTFAccessor> accessor;
 	accessor.instantiate();
 	GLTFBufferIndex buffer_view_i;
+	if (p_state->buffers.is_empty()) {
+		p_state->buffers.push_back(Vector<uint8_t>());
+	}
 	int64_t size = p_state->buffers[0].size();
 	const GLTFType type = GLTFType::TYPE_VEC4;
 	const int component_type = GLTFDocument::COMPONENT_TYPE_FLOAT;
@@ -1816,6 +1831,9 @@ GLTFAccessorIndex GLTFDocument::_encode_accessor_as_joints(Ref<GLTFState> p_stat
 	Ref<GLTFAccessor> accessor;
 	accessor.instantiate();
 	GLTFBufferIndex buffer_view_i;
+	if (p_state->buffers.is_empty()) {
+		p_state->buffers.push_back(Vector<uint8_t>());
+	}
 	int64_t size = p_state->buffers[0].size();
 	const GLTFType type = GLTFType::TYPE_VEC4;
 	const int component_type = GLTFDocument::COMPONENT_TYPE_UNSIGNED_SHORT;
@@ -1866,6 +1884,9 @@ GLTFAccessorIndex GLTFDocument::_encode_accessor_as_quaternions(Ref<GLTFState> p
 	Ref<GLTFAccessor> accessor;
 	accessor.instantiate();
 	GLTFBufferIndex buffer_view_i;
+	if (p_state->buffers.is_empty()) {
+		p_state->buffers.push_back(Vector<uint8_t>());
+	}
 	int64_t size = p_state->buffers[0].size();
 	const GLTFType type = GLTFType::TYPE_VEC4;
 	const int component_type = GLTFDocument::COMPONENT_TYPE_FLOAT;
@@ -1938,6 +1959,9 @@ GLTFAccessorIndex GLTFDocument::_encode_accessor_as_floats(Ref<GLTFState> p_stat
 	Ref<GLTFAccessor> accessor;
 	accessor.instantiate();
 	GLTFBufferIndex buffer_view_i;
+	if (p_state->buffers.is_empty()) {
+		p_state->buffers.push_back(Vector<uint8_t>());
+	}
 	int64_t size = p_state->buffers[0].size();
 	const GLTFType type = GLTFType::TYPE_SCALAR;
 	const int component_type = GLTFDocument::COMPONENT_TYPE_FLOAT;
@@ -1985,6 +2009,9 @@ GLTFAccessorIndex GLTFDocument::_encode_accessor_as_vec3(Ref<GLTFState> p_state,
 	Ref<GLTFAccessor> accessor;
 	accessor.instantiate();
 	GLTFBufferIndex buffer_view_i;
+	if (p_state->buffers.is_empty()) {
+		p_state->buffers.push_back(Vector<uint8_t>());
+	}
 	int64_t size = p_state->buffers[0].size();
 	const GLTFType type = GLTFType::TYPE_VEC3;
 	const int component_type = GLTFDocument::COMPONENT_TYPE_FLOAT;
@@ -2058,6 +2085,9 @@ GLTFAccessorIndex GLTFDocument::_encode_sparse_accessor_as_vec3(Ref<GLTFState> p
 	Ref<GLTFAccessor> sparse_accessor;
 	sparse_accessor.instantiate();
 
+	if (p_state->buffers.is_empty()) {
+		p_state->buffers.push_back(Vector<uint8_t>());
+	}
 	int64_t size = p_state->buffers[0].size();
 	const GLTFType type = GLTFType::TYPE_VEC3;
 	const int component_type = GLTFDocument::COMPONENT_TYPE_FLOAT;
@@ -2160,6 +2190,9 @@ GLTFAccessorIndex GLTFDocument::_encode_accessor_as_xform(Ref<GLTFState> p_state
 	Ref<GLTFAccessor> accessor;
 	accessor.instantiate();
 	GLTFBufferIndex buffer_view_i;
+	if (p_state->buffers.is_empty()) {
+		p_state->buffers.push_back(Vector<uint8_t>());
+	}
 	int64_t size = p_state->buffers[0].size();
 	const GLTFType type = GLTFType::TYPE_MAT4;
 	const int component_type = GLTFDocument::COMPONENT_TYPE_FLOAT;
@@ -6944,7 +6977,7 @@ Error GLTFDocument::_serialize_file(Ref<GLTFState> p_state, const String p_path)
 		const uint32_t text_chunk_type = 0x4E4F534A; //JSON
 
 		uint32_t binary_data_length = 0;
-		if (p_state->buffers.size()) {
+		if (p_state->buffers.size() > 0) {
 			binary_data_length = p_state->buffers[0].size();
 		}
 		const uint32_t binary_chunk_length = ((binary_data_length + 3) & (~3));
@@ -6953,20 +6986,28 @@ Error GLTFDocument::_serialize_file(Ref<GLTFState> p_state, const String p_path)
 		file->create(FileAccess::ACCESS_RESOURCES);
 		file->store_32(magic);
 		file->store_32(p_state->major_version); // version
-		file->store_32(header_size + chunk_header_size + text_chunk_length + chunk_header_size + binary_chunk_length); // length
+		uint32_t total_length = header_size + chunk_header_size + text_chunk_length;
+		if (binary_chunk_length) {
+			total_length += chunk_header_size + binary_chunk_length;
+		}
+		file->store_32(total_length);
+
+		// Write the JSON text chunk.
 		file->store_32(text_chunk_length);
 		file->store_32(text_chunk_type);
 		file->store_buffer((uint8_t *)&cs[0], cs.length());
 		for (uint32_t pad_i = text_data_length; pad_i < text_chunk_length; pad_i++) {
 			file->store_8(' ');
 		}
+
+		// Write a single binary chunk.
 		if (binary_chunk_length) {
 			file->store_32(binary_chunk_length);
 			file->store_32(binary_chunk_type);
 			file->store_buffer(p_state->buffers[0].ptr(), binary_data_length);
-		}
-		for (uint32_t pad_i = binary_data_length; pad_i < binary_chunk_length; pad_i++) {
-			file->store_8(0);
+			for (uint32_t pad_i = binary_data_length; pad_i < binary_chunk_length; pad_i++) {
+				file->store_8(0);
+			}
 		}
 	} else {
 		err = _encode_buffer_bins(p_state, p_path);
@@ -7073,7 +7114,7 @@ PackedByteArray GLTFDocument::_serialize_glb_buffer(Ref<GLTFState> p_state, Erro
 	const uint32_t text_chunk_type = 0x4E4F534A; //JSON
 
 	int32_t binary_data_length = 0;
-	if (p_state->buffers.size()) {
+	if (p_state->buffers.size() > 0) {
 		binary_data_length = p_state->buffers[0].size();
 	}
 	const int32_t binary_chunk_length = binary_data_length;
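
Note (not part of the patch): the sketch below is a minimal standalone illustration of the GLB length accounting that the _serialize_file() hunks above implement, assuming a hypothetical helper name glb_total_length. The total length is a 12-byte file header plus, for each chunk that is actually written, an 8-byte chunk header and a 4-byte-aligned payload; the reported bug was that the BIN chunk's 8-byte header stayed in the total even when the chunk itself was dropped.

#include <cstdint>
#include <cstdio>

// Hypothetical helper (illustration only, not Godot API): mirrors the
// total-length arithmetic introduced in _serialize_file().
static uint32_t glb_total_length(uint32_t json_length, uint32_t binary_length) {
	const uint32_t header_size = 12;      // magic + version + total length
	const uint32_t chunk_header_size = 8; // chunkLength + chunkType

	// Chunk payloads are padded to a 4-byte boundary.
	const uint32_t text_chunk_length = (json_length + 3) & ~3u;
	const uint32_t binary_chunk_length = (binary_length + 3) & ~3u;

	uint32_t total = header_size + chunk_header_size + text_chunk_length;
	if (binary_chunk_length) {
		// Only count the BIN chunk header when the chunk is actually written.
		total += chunk_header_size + binary_chunk_length;
	}
	return total;
}

int main() {
	// The old code always added chunk_header_size for the BIN chunk, so a
	// GLB with no buffer data reported 8 bytes more than the file contained.
	std::printf("JSON only:  %u\n", glb_total_length(100, 0));  // 12 + 8 + 100 = 120
	std::printf("JSON + BIN: %u\n", glb_total_length(100, 50)); // 120 + 8 + 52 = 180
	return 0;
}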