Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Small allocation fix in fbx loader #4494

Merged
merged 10 commits into from May 16, 2023
10 changes: 4 additions & 6 deletions code/AssetLib/FBX/FBXBinaryTokenizer.cpp
Expand Up @@ -341,8 +341,7 @@ void ReadData(const char*& sbegin_out, const char*& send_out, const char* input,


// ------------------------------------------------------------------------------------------------
bool ReadScope(TokenList& output_tokens, const char* input, const char*& cursor, const char* end, bool const is64bits)
{
bool ReadScope(TokenList &output_tokens, StackAllocator &token_allocator, const char *input, const char *&cursor, const char *end, bool const is64bits) {
// the first word contains the offset at which this block ends
const uint64_t end_offset = is64bits ? ReadDoubleWord(input, cursor, end) : ReadWord(input, cursor, end);

Expand Down Expand Up @@ -408,7 +407,7 @@ bool ReadScope(TokenList& output_tokens, const char* input, const char*& cursor,

// XXX this is vulnerable to stack overflowing ..
while(Offset(input, cursor) < end_offset - sentinel_block_length) {
ReadScope(output_tokens, input, cursor, input + end_offset - sentinel_block_length, is64bits);
ReadScope(output_tokens, token_allocator, input, cursor, input + end_offset - sentinel_block_length, is64bits);
}
output_tokens.push_back(new_Token(cursor, cursor + 1, TokenType_CLOSE_BRACKET, Offset(input, cursor) ));

Expand All @@ -431,8 +430,7 @@ bool ReadScope(TokenList& output_tokens, const char* input, const char*& cursor,

// ------------------------------------------------------------------------------------------------
// TODO: Test FBX Binary files newer than the 7500 version to check if the 64 bits address behaviour is consistent
void TokenizeBinary(TokenList& output_tokens, const char* input, size_t length)
{
void TokenizeBinary(TokenList &output_tokens, const char *input, size_t length, StackAllocator &token_allocator) {
ai_assert(input);
ASSIMP_LOG_DEBUG("Tokenizing binary FBX file");

Expand Down Expand Up @@ -465,7 +463,7 @@ void TokenizeBinary(TokenList& output_tokens, const char* input, size_t length)
try
{
while (cursor < end ) {
if (!ReadScope(output_tokens, input, cursor, input + length, is64bits)) {
if (!ReadScope(output_tokens, token_allocator, input, cursor, input + length, is64bits)) {
break;
}
}
Expand Down
30 changes: 19 additions & 11 deletions code/AssetLib/FBX/FBXDocument.cpp
Expand Up @@ -235,7 +235,7 @@ FileGlobalSettings::FileGlobalSettings(const Document &doc, std::shared_ptr<cons
}

// ------------------------------------------------------------------------------------------------
Document::Document(const Parser& parser, const ImportSettings& settings) :
Document::Document(Parser& parser, const ImportSettings& settings) :
settings(settings), parser(parser) {
ASSIMP_LOG_DEBUG("Creating FBX Document");

Expand All @@ -257,13 +257,17 @@ Document::Document(const Parser& parser, const ImportSettings& settings) :
}

// ------------------------------------------------------------------------------------------------
Document::~Document()
{
    // The document does not own the memory for the following objects, but we need to call their
    // d'tor so they can properly free memory like string members: the storage itself comes from
    // the parser's StackAllocator (see new_LazyObject / new_Connection) and is reclaimed in bulk
    // when that allocator is destroyed, so plain `delete` must not be used here.

    for (ObjectMap::value_type &v : objects) {
        delete_LazyObject(v.second);
    }

    for (ConnectionMap::value_type &v : src_connections) {
        delete_Connection(v.second);
    }
    // |dest_connections| contain the same Connection objects as the |src_connections|
}
Expand Down Expand Up @@ -348,9 +352,11 @@ void Document::ReadObjects() {
DOMError("no Objects dictionary found");
}

StackAllocator &allocator = parser.GetAllocator();

// add a dummy entry to represent the Model::RootNode object (id 0),
// which is only indirectly defined in the input file
objects[0] = new LazyObject(0L, *eobjects, *this);
objects[0] = new_LazyObject(0L, *eobjects, *this);

const Scope& sobjects = *eobjects->Compound();
for(const ElementMap::value_type& el : sobjects.Elements()) {
Expand All @@ -377,7 +383,7 @@ void Document::ReadObjects() {
DOMWarning("encountered duplicate object id, ignoring first occurrence",el.second);
}

objects[id] = new LazyObject(id, *el.second, *this);
objects[id] = new_LazyObject(id, *el.second, *this);

// grab all animation stacks upfront since there is no listing of them
if(!strcmp(el.first.c_str(),"AnimationStack")) {
Expand Down Expand Up @@ -444,8 +450,10 @@ void Document::ReadPropertyTemplates() {
}

// ------------------------------------------------------------------------------------------------
void Document::ReadConnections() {
const Scope& sc = parser.GetRootScope();
void Document::ReadConnections()
{
StackAllocator &allocator = parser.GetAllocator();
const Scope &sc = parser.GetRootScope();
// read property templates from "Definitions" section
const Element* const econns = sc["Connections"];
if(!econns || !econns->Compound()) {
Expand Down Expand Up @@ -484,7 +492,7 @@ void Document::ReadConnections() {
}

// add new connection
const Connection* const c = new Connection(insertionOrder++,src,dest,prop,*this);
const Connection* const c = new_Connection(insertionOrder++,src,dest,prop,*this);
src_connections.insert(ConnectionMap::value_type(src,c));
dest_connections.insert(ConnectionMap::value_type(dest,c));
}
Expand Down
8 changes: 6 additions & 2 deletions code/AssetLib/FBX/FBXDocument.h
Expand Up @@ -80,6 +80,10 @@ class BlendShape;
class Skin;
class Cluster;

// Placement-new helpers: LazyObject and Connection instances are carved out of a StackAllocator
// (an `allocator` variable must be in scope at the expansion site) instead of the heap.
#define new_LazyObject new (allocator.Allocate(sizeof(LazyObject))) LazyObject
#define new_Connection new (allocator.Allocate(sizeof(Connection))) Connection
// Matching "delete": only the destructor runs; the allocator reclaims the storage in bulk later.
#define delete_LazyObject(_p) (_p)->~LazyObject()
#define delete_Connection(_p) (_p)->~Connection()
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Object construction and destruction should be delegated to functions provided by the allocator. Generally, it would be better not to use macros like that.

// #include <new>

public:
    template<class T, class... Args>
    T* ConstructObject(Args&&... args)
    {
        return ::new(this->Allocate(sizeof(T))) T(std::forward<Args>(args)...);
    }

    template<class T>
    void DestroyObject(T* obj) noexcept
    {
        obj->~T();
    }


/** Represents a delay-parsed FBX objects. Many objects in the scene
* are not needed by assimp, so it makes no sense to parse them
Expand Down Expand Up @@ -1072,7 +1076,7 @@ class FileGlobalSettings {
/** DOM root for a FBX file */
class Document {
public:
Document(const Parser& parser, const ImportSettings& settings);
Document(Parser& parser, const ImportSettings& settings);

~Document();

Expand Down Expand Up @@ -1156,7 +1160,7 @@ class Document {
const ImportSettings& settings;

ObjectMap objects;
const Parser& parser;
Parser& parser;

PropertyTemplateMap templates;
ConnectionMap src_connections;
Expand Down
20 changes: 11 additions & 9 deletions code/AssetLib/FBX/FBXImporter.cpp
Expand Up @@ -156,19 +156,19 @@ void FBXImporter::InternReadFile(const std::string &pFile, aiScene *pScene, IOSy
// broad-phase tokenized pass in which we identify the core
// syntax elements of FBX (brackets, commas, key:value mappings)
TokenList tokens;
try {

Assimp::StackAllocator tempAllocator;
try {
bool is_binary = false;
if (!strncmp(begin, "Kaydara FBX Binary", 18)) {
is_binary = true;
TokenizeBinary(tokens, begin, contents.size());
TokenizeBinary(tokens, begin, contents.size(), tempAllocator);
} else {
Tokenize(tokens, begin);
Tokenize(tokens, begin, tempAllocator);
}

// use this information to construct a very rudimentary
// parse-tree representing the FBX scope structure
Parser parser(tokens, is_binary);
Parser parser(tokens, tempAllocator, is_binary);

// take the raw parse-tree and convert it to a FBX DOM
Document doc(parser, mSettings);
Expand All @@ -187,10 +187,12 @@ void FBXImporter::InternReadFile(const std::string &pFile, aiScene *pScene, IOSy
// assimp universal format (M)
SetFileScale(size_relative_to_cm * 0.01f);

std::for_each(tokens.begin(), tokens.end(), Util::delete_fun<Token>());
} catch (std::exception &) {
std::for_each(tokens.begin(), tokens.end(), Util::delete_fun<Token>());
throw;
// This collection does not own the memory for the tokens, but we need to call their d'tor
std::for_each(tokens.begin(), tokens.end(), Util::destructor_fun<Token>());

} catch (std::exception &) {
std::for_each(tokens.begin(), tokens.end(), Util::destructor_fun<Token>());
throw;
}
}

Expand Down
42 changes: 30 additions & 12 deletions code/AssetLib/FBX/FBXParser.cpp
Expand Up @@ -115,8 +115,11 @@ namespace Assimp {
namespace FBX {

// ------------------------------------------------------------------------------------------------
Element::Element(const Token& key_token, Parser& parser) : key_token(key_token) {
Element::Element(const Token& key_token, Parser& parser) :
key_token(key_token), compound(nullptr)
{
TokenPtr n = nullptr;
StackAllocator &allocator = parser.GetAllocator();
do {
n = parser.AdvanceToNextToken();
if(!n) {
Expand Down Expand Up @@ -145,7 +148,7 @@ Element::Element(const Token& key_token, Parser& parser) : key_token(key_token)
}

if (n->Type() == TokenType_OPEN_BRACKET) {
compound.reset(new Scope(parser));
compound = new_Scope(parser);

// current token should be a TOK_CLOSE_BRACKET
n = parser.CurrentToken();
Expand All @@ -163,6 +166,15 @@ Element::Element(const Token& key_token, Parser& parser) : key_token(key_token)
}

// ------------------------------------------------------------------------------------------------
// Tear down the optional compound scope. Its storage comes from the parser's StackAllocator
// (see new_Scope), so only the destructor is invoked here — never operator delete.
Element::~Element() {
    if (compound != nullptr) {
        delete_Scope(compound);
    }
    // Tokens are owned by the parser; nothing to release for them here.
}

Scope::Scope(Parser& parser,bool topLevel)
{
if(!topLevel) {
Expand All @@ -172,6 +184,7 @@ Scope::Scope(Parser& parser,bool topLevel)
}
}

StackAllocator &allocator = parser.GetAllocator();
TokenPtr n = parser.AdvanceToNextToken();
if (n == nullptr) {
ParseError("unexpected end of file");
Expand Down Expand Up @@ -202,22 +215,27 @@ Scope::Scope(Parser& parser,bool topLevel)
}

// ------------------------------------------------------------------------------------------------
Scope::~Scope()
{
    // This collection does not own the memory for the elements (it comes from the parser's
    // StackAllocator via new_Element), but we need to call their d'tor so members such as
    // strings are released properly:

    for (ElementMap::value_type &v : elements) {
        delete_Element(v.second);
    }
}

// ------------------------------------------------------------------------------------------------
/** Parse the given token list. The parser does not take ownership of the tokens, and the
 *  supplied StackAllocator provides the backing storage for every Scope/Element node built
 *  while constructing the parse tree. */
Parser::Parser(const TokenList &tokens, StackAllocator &allocator, bool is_binary) :
        tokens(tokens),
        allocator(allocator),
        last(),
        current(),
        cursor(tokens.begin()),
        is_binary(is_binary)
{
    ASSIMP_LOG_DEBUG("Parsing FBX tokens");
    // The root scope is placement-new'd into the allocator (new_Scope);
    // Parser::~Parser runs its destructor explicitly via delete_Scope.
    root = new_Scope(*this, true);
}

// ------------------------------------------------------------------------------------------------
// Destroy the root scope explicitly: its memory belongs to the StackAllocator, so we call
// the destructor only (delete_Scope expands to an explicit ~Scope() call), not operator delete.
Parser::~Parser() {
    delete_Scope(root);
}

// ------------------------------------------------------------------------------------------------
Expand Down
28 changes: 17 additions & 11 deletions code/AssetLib/FBX/FBXParser.h
Expand Up @@ -52,6 +52,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <assimp/LogAux.h>
#include <assimp/fast_atof.h>

#include "Common/StackAllocator.h"
#include "FBXCompileConfig.h"
#include "FBXTokenizer.h"

Expand All @@ -68,9 +69,10 @@ typedef std::fbx_unordered_multimap< std::string, Element* > ElementMap;

typedef std::pair<ElementMap::const_iterator,ElementMap::const_iterator> ElementCollection;

# define new_Scope new Scope
# define new_Element new Element

// Placement-new helpers: Scope and Element objects are allocated from a StackAllocator
// (an `allocator` variable must be in scope at the expansion site) rather than the heap.
#define new_Scope new (allocator.Allocate(sizeof(Scope))) Scope
#define new_Element new (allocator.Allocate(sizeof(Element))) Element
// Matching "delete": only the destructor runs; the StackAllocator releases storage in bulk.
#define delete_Scope(_p) (_p)->~Scope()
#define delete_Element(_p) (_p)->~Element()

/** FBX data entity that consists of a key:value tuple.
*
Expand All @@ -87,10 +89,10 @@ class Element
{
public:
Element(const Token& key_token, Parser& parser);
~Element() = default;
~Element();

const Scope* Compound() const {
return compound.get();
return compound;
}

const Token& KeyToken() const {
Expand All @@ -104,7 +106,7 @@ class Element
private:
const Token& key_token;
TokenList tokens;
std::unique_ptr<Scope> compound;
Scope* compound;
};

/** FBX data entity that consists of a 'scope', a collection
Expand Down Expand Up @@ -159,17 +161,21 @@ class Parser
public:
/** Parse given a token list. Does not take ownership of the tokens -
* the objects must persist during the entire parser lifetime */
Parser (const TokenList& tokens,bool is_binary);
~Parser() = default;
Parser(const TokenList &tokens, StackAllocator &allocator, bool is_binary);
~Parser();

const Scope& GetRootScope() const {
return *root.get();
return *root;
}

bool IsBinary() const {
return is_binary;
}

StackAllocator &GetAllocator() {
return allocator;
}

private:
friend class Scope;
friend class Element;
Expand All @@ -180,10 +186,10 @@ class Parser

private:
const TokenList& tokens;

StackAllocator &allocator;
TokenPtr last, current;
TokenList::const_iterator cursor;
std::unique_ptr<Scope> root;
Scope *root;

const bool is_binary;
};
Expand Down