Skip to content

Commit

Permalink
code refactoring using C++11 features (nullptr, auto, lambdas, foreach…
Browse files Browse the repository at this point in the history
… etc)
  • Loading branch information
Koziev committed Apr 19, 2018
1 parent 0317f94 commit 444ab7a
Show file tree
Hide file tree
Showing 240 changed files with 37,984 additions and 38,778 deletions.
266 changes: 132 additions & 134 deletions src/ai/aa/aa_autom.cpp
Expand Up @@ -4,11 +4,7 @@
// (c) by Elijah Koziev
//
// SOLARIX Intellectronix Project http://www.solarix.ru
// http://sourceforge.net/projects/solarix
//
// Licensed under the terms of GNU Lesser GPL
// You must not eliminate, delete or supress these copyright strings
// from the file!
// https://github.com/Koziev/GrammarEngine
//
// Content:
// SOLARIX Grammar engine
Expand All @@ -25,7 +21,7 @@
// -----------------------------------------------------------------------------
//
// CD->25.02.1997
// LC->02.02.2013
// LC->02.04.2018
// --------------

#include <lem/solarix/tokens.h>
Expand All @@ -46,199 +42,201 @@
using namespace lem;
using namespace Solarix;

// Construct the Aleph automaton on top of the generic pattern-matching
// automaton, registering it under the dedicated SOL_AA_INDEX section.
AlephAutomat::AlephAutomat() : PM_Automat(SOL_AA_INDEX)
{
    // A storage backend is attached later via SetStorage(); until then
    // the automaton owns nothing persistent.
    do_delete_storage = false;
    storage = nullptr;
    param = new CriteriaInStorage();
}

AlephAutomat::~AlephAutomat(void)
{
    // Release the storage only if SetStorage() transferred ownership to us.
    if (do_delete_storage)
        lem_rub_off(storage);
}


// Convenience accessor for the syntactic grammar owned by the dictionary.
SynGram& AlephAutomat::GetSynGram(void) const
{
    return dict->GetSynGram();
}


#if defined SOL_LOADTXT && defined SOL_COMPILER
// Compile one lexem read from the dictionary source text. The Aleph
// automaton adds nothing of its own and delegates to the base class.
bool AlephAutomat::ProcessLexem(
    const BethToken &t,
    Macro_Parser &txtfile,
    const Binarization_Options &options
)
{
    return PM_Automat::ProcessLexem(t, txtfile, options);
}

#endif


// Attach a lexicon storage backend. When _do_delete is true, the automaton
// takes ownership and deletes the storage in its destructor.
void AlephAutomat::SetStorage(LexiconStorage *stg, bool _do_delete)
{
    // Dispose of a previously owned backend before replacing it.
    if (do_delete_storage)
        delete storage;

    storage = stg;
    do_delete_storage = _do_delete;

    PM_Automat::SetStorage(L"aa", stg);
    // NOTE(review): C-style downcast kept as-is — assumes stg really is a
    // CriteriaStorage; confirm against the callers.
    static_cast<CriteriaInStorage*>(param)->Connect((CriteriaStorage*)stg);
}

#if defined SOL_REPORT
// Dump the automaton rules as SQL. Skipped entirely when the SQL dialect
// options request no rule dump.
void AlephAutomat::SaveRules_SQL(OFormatter &out, OFormatter &alters, const SQL_Production &sql_version)
{
    if (sql_version.norules)
        return;

    PM_Automat::SaveRules_SQL("aa", out, alters, sql_version);
}
#endif

#if defined SOL_LOADTXT && defined SOL_COMPILER
// Build the binary representation of the automaton in outdir.
void AlephAutomat::Prepare(const lem::Path &outdir, const Binarization_Options &opts)
{
    // Lengthy operation: let the user know we are working.
    GetIO().mecho().printf("%vfEAA%vn: preparing...");
    GetIO().mecho().flush();

    PM_Automat::Prepare(outdir, opts);

    GetIO().mecho().printf("%vfAOK%vn\n");
    GetIO().mecho().flush();
}
#endif


#if defined SOL_CAA
// Return the name of the syntactic-analyzer procedure configured for the
// given language (its "SyntacticAnalyzer" language parameter), caching the
// result in language2syntan. Returns an empty string when the language
// declares no such parameter.
const lem::UCString & AlephAutomat::GetSyntacticAnalyzerName(int id_language)
{
#if defined LEM_THREADS
    lem::Process::RWU_ReaderGuard rlock(cs_language2syntan);
#endif

    auto it = language2syntan.find(id_language);
    if (it == language2syntan.end())
    {
#if defined LEM_THREADS
        // Upgrade to a writer lock before mutating the cache.
        lem::Process::RWU_WriterGuard wlock(rlock);
#endif

        // Re-check under the writer lock: another thread may have
        // inserted the entry while we were waiting.
        it = language2syntan.find(id_language);
        if (it == language2syntan.end())
        {
            lem::UCString name;

            const SG_Language & lang = GetDict().GetSynGram().languages()[id_language];
            const int iparam = lang.FindParam(L"SyntacticAnalyzer");

            if (iparam != UNKNOWN)
            {
                name = lang.GetParam(iparam).GetValue().c_str();
                name.strip_quotes();
            }

            language2syntan.insert(std::make_pair(id_language, name));
            return language2syntan.find(id_language)->second;
        }
        else
        {
            return it->second;
        }
    }
    else
    {
        return it->second;
    }
}
#endif



#if defined SOL_CAA
// Perform syntactic analysis over the token lattice produced by the lexer.
// For every rightmost leaf path in the lattice, the wordforms along the
// path are packed into a Variator and collected into the returned Res_Pack
// (caller takes ownership). Work is bounded by the lexer's timeout settings
// (max elapsed milliseconds and max variators).
// NOTE(review): env, constraints and trace are currently unused here —
// presumably reserved for the language-specific analyzer procedure.
Res_Pack* AlephAutomat::Analyze(
    BasicLexer & lexer,
    TrWideContext & env,
    const ElapsedTimeConstraint & constraints,
    TrTrace * trace
)
{
    // Each language may define its own syntactic-analysis procedure;
    // its name is stored in the language parameters.
    const int language_id = lexer.GetParams().GetLanguageID();
    const lem::UCString & proc_name = GetSyntacticAnalyzerName(language_id);

    if (proc_name.empty())
    {
        lem::MemFormatter mem;
        const SG_Language & lang = GetDict().GetSynGram().languages()[language_id];
        mem.printf("There is no procedure for language %us (id=%d) to perform syntactic analysis", lang.GetName().c_str(), language_id);
    }

    Res_Pack * res = new Res_Pack();

    const LexerTextPos * begin = lexer.GetBeginToken();

    // Collect the rightmost end positions (leaves) reachable from begin.
    lem::MCollect<const LexerTextPos*> leafs;
    lexer.FetchEnds(begin, leafs, lexer.GetParams().GetMaxRightLeaves());

    lem::MCollect<const LexerTextPos*> nodes;

    lem::ElapsedTime timer;
    timer.start();

    for (lem::Container::size_type i = 0; i < leafs.size(); ++i)
    {
        const LexerTextPos * leaf = leafs[i];
        nodes.clear();
        leaf->Collect_Right2Left(begin, nodes); // collect all wordforms along this path

        Variator * var = new Variator();

        // The wordforms were collected right-to-left; walk backwards to
        // restore the original left-to-right order.
        for (int j = CastSizeToInt(nodes.size()) - 1; j >= 0; --j)
        {
            const LexerTextPos * token = nodes[j];
            Tree_Node * node = new Tree_Node(new Word_Form(*token->GetWordform()), false);
            var->Add(node);
        }

        res->Add(var);

        // Stop early once the configured time budget is exhausted.
        if (lexer.GetParams().timeout.max_elapsed_millisecs > 0)
        {
            const int elapsed_millisec = timer.msec();
            if (elapsed_millisec >= lexer.GetParams().timeout.max_elapsed_millisecs)
                break;
        }

        // Stop early once the configured variator budget is exhausted.
        if (lexer.GetParams().timeout.max_variators > 0)
        {
            if (CastSizeToInt(i) >= lexer.GetParams().timeout.max_variators)
                break;
        }
    }

#if LEM_DEBUGGING==1
    //res->Print( *lem::mout, GetDict().GetSynGram(), true );
#endif

    return res;
}
#endif

0 comments on commit 444ab7a

Please sign in to comment.