Refactoring of tokenizer and stream classes for better efficiency and maintainability.

This commit is contained in:
2017-06-21 02:56:27 +02:00
parent 97c6e58355
commit 39c0e27cb2
38 changed files with 466 additions and 512 deletions

View File

@@ -60,7 +60,6 @@ set(sources
set(libraries
${Boost_LIBRARIES}
tokenize
pddlparse
pthread
)

View File

@@ -136,7 +136,7 @@ void Logger::log(Priority priority, const tokenize::Location &location, const ch
m_errorStream
<< LocationFormat
<< location.sectionStart() << ":" << location.rowStart() << ":" << location.columnStart() << ":"
<< location.sectionStart << ":" << location.rowStart << ":" << location.columnStart << ":"
<< ResetFormat() << " "
<< priorityFormat(priority) << priorityName(priority) << ":"
<< ResetFormat() << " "

View File

@@ -173,7 +173,7 @@ void Description::parseContent(tokenize::Tokenizer<> &tokenizer)
tokenizer.skipWhiteSpace();
if (!tokenizer.atEnd())
throw tokenize::TokenizerException(tokenizer, "expected end of SAS description (perhaps, input contains two SAS descriptions?)");
throw tokenize::TokenizerException(tokenizer.location(), "expected end of SAS description (perhaps, input contains two SAS descriptions?)");
}
////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -185,7 +185,7 @@ void Description::parseVersionSection(tokenize::Tokenizer<> &tokenizer) const
const auto formatVersion = tokenizer.get<size_t>();
if (formatVersion != 3)
throw tokenize::TokenizerException(tokenizer, "unsupported SAS format version (" + std::to_string(formatVersion) + ")");
throw tokenize::TokenizerException(tokenizer.location(), "unsupported SAS format version (" + std::to_string(formatVersion) + ")");
tokenizer.expect<std::string>("end_version");
}

View File

@@ -29,7 +29,7 @@ MutexGroup MutexGroup::fromSAS(tokenize::Tokenizer<> &tokenizer, const Variables
mutexGroup.m_facts.emplace_back(Fact::fromSAS(tokenizer, variables));
if (mutexGroup.m_facts[j].value() == Value::None)
throw tokenize::TokenizerException(tokenizer, "mutex groups must not contain <none of those> values");
throw tokenize::TokenizerException(tokenizer.location(), "mutex groups must not contain <none of those> values");
}
tokenizer.expect<std::string>("end_mutex_group");

View File

@@ -44,7 +44,7 @@ Predicate Predicate::fromSAS(tokenize::Tokenizer<> &tokenizer)
}
catch (const std::exception &e)
{
throw tokenize::TokenizerException(tokenizer, "could not parse operator predicate");
throw tokenize::TokenizerException(tokenizer.location(), "could not parse operator predicate");
}
return predicate;

View File

@@ -75,7 +75,7 @@ Value Value::fromSAS(tokenize::Tokenizer<> &tokenizer)
else if (sasSign == "NegatedAtom")
value.m_sign = Value::Sign::Negative;
else
throw tokenize::TokenizerException(tokenizer, "invalid value sign “" + sasSign + "”");
throw tokenize::TokenizerException(tokenizer.location(), "invalid value sign “" + sasSign + "”");
try
{
@@ -91,7 +91,7 @@ Value Value::fromSAS(tokenize::Tokenizer<> &tokenizer)
}
catch (const std::exception &e)
{
throw tokenize::TokenizerException(tokenizer, std::string("could not parse variable value (") + e.what() + ")");
throw tokenize::TokenizerException(tokenizer.location(), std::string("could not parse variable value (") + e.what() + ")");
}
return value;
@@ -107,7 +107,7 @@ const Value &Value::referenceFromSAS(tokenize::Tokenizer<> &tokenizer, const Var
return Value::Any;
if (valueID < 0 || static_cast<size_t>(valueID) >= variable.values().size())
throw tokenize::TokenizerException(tokenizer, "value index out of range (variable " + variable.name() + ", index " + std::to_string(valueID) + ")");
throw tokenize::TokenizerException(tokenizer.location(), "value index out of range (variable " + variable.name() + ", index " + std::to_string(valueID) + ")");
return variable.values()[valueID];
}

View File

@@ -43,7 +43,7 @@ Variable Variable::fromSAS(tokenize::Tokenizer<> &tokenizer)
// <none of those> values are only allowed at the end
if (j < numberOfValues - 1 && variable.m_values[j] == Value::None)
throw tokenize::TokenizerException(tokenizer, "<none of those> value must be the last value of a variable");
throw tokenize::TokenizerException(tokenizer.location(), "<none of those> value must be the last value of a variable");
}
tokenizer.expect<std::string>("end_variable");
@@ -66,7 +66,7 @@ const Variable &Variable::referenceFromSAS(tokenize::Tokenizer<> &tokenizer, con
const auto variableID = tokenizer.get<size_t>();
if (variableID >= variables.size())
throw tokenize::TokenizerException(tokenizer, "variable index out of range (index " + std::to_string(variableID) + ")");
throw tokenize::TokenizerException(tokenizer.location(), "variable index out of range (index " + std::to_string(variableID) + ")");
return variables[variableID];
}