Renamed tokenizing module for clarity.

This commit is contained in:
2017-05-12 14:17:57 +02:00
parent c10187f6ba
commit e312a91632
77 changed files with 854 additions and 861 deletions

View File

@@ -27,7 +27,7 @@ file(GLOB utils_headers "../include/plasp/utils/*.h")
set(includes
${Boost_INCLUDE_DIRS}
${PROJECT_SOURCE_DIR}/include
${PROJECT_SOURCE_DIR}/lib/parsebase/include
${PROJECT_SOURCE_DIR}/lib/tokenize/include
)
set(sources
@@ -58,7 +58,7 @@ set(sources
set(libraries
${Boost_LIBRARIES}
parsebase
tokenize
pthread
)

View File

@@ -123,7 +123,7 @@ void Logger::log(Priority priority, const std::string &message)
////////////////////////////////////////////////////////////////////////////////////////////////////
void Logger::log(Priority priority, const parsebase::Location &location, const char *message)
void Logger::log(Priority priority, const tokenize::Location &location, const char *message)
{
const auto priorityID = static_cast<int>(priority);
@@ -150,7 +150,7 @@ void Logger::log(Priority priority, const parsebase::Location &location, const c
////////////////////////////////////////////////////////////////////////////////////////////////////
void Logger::log(Priority priority, const parsebase::Location &location, const std::string &message)
void Logger::log(Priority priority, const tokenize::Location &location, const std::string &message)
{
log(priority, location, message.c_str());
}

View File

@@ -20,14 +20,14 @@ namespace pddl
void Action::parseDeclaration(Context &context, Domain &domain)
{
auto &parser = context.parser;
auto &tokenizer = context.tokenizer;
auto action = std::make_unique<Action>(Action());
action->m_name = parser.parseIdentifier();
action->m_name = tokenizer.getIdentifier();
parser.expect<std::string>(":parameters");
parser.expect<std::string>("(");
tokenizer.expect<std::string>(":parameters");
tokenizer.expect<std::string>("(");
ExpressionContext expressionContext(domain);
expressionContext.variables.push(&action->m_parameters);
@@ -35,19 +35,19 @@ void Action::parseDeclaration(Context &context, Domain &domain)
// Read parameters
expressions::Variable::parseTypedDeclarations(context, expressionContext, action->m_parameters);
parser.expect<std::string>(")");
tokenizer.expect<std::string>(")");
// Parse preconditions and effects
while (!parser.testAndReturn(')'))
while (!tokenizer.testAndReturn(')'))
{
parser.expect<std::string>(":");
tokenizer.expect<std::string>(":");
if (parser.testIdentifierAndSkip("precondition"))
if (tokenizer.testIdentifierAndSkip("precondition"))
action->m_precondition = parsePreconditionExpression(context, expressionContext);
else if (parser.testIdentifierAndSkip("effect"))
else if (tokenizer.testIdentifierAndSkip("effect"))
action->m_effect = parseEffectExpression(context, expressionContext);
parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
}
// Store new action

View File

@@ -6,7 +6,7 @@
#include <plasp/pddl/ConsistencyException.h>
#include <plasp/pddl/IO.h>
#include <parsebase/ParserException.h>
#include <tokenize/TokenizerException.h>
namespace plasp
{
@@ -44,7 +44,7 @@ Description Description::fromStream(std::istream &istream, Context &context)
{
Description description(context);
description.m_context.parser.read("std::cin", istream);
description.m_context.tokenizer.read("std::cin", istream);
description.parse();
return description;
@@ -56,7 +56,7 @@ Description Description::fromFile(const std::string &path, Context &context)
{
Description description(context);
description.m_context.parser.read(path);
description.m_context.tokenizer.read(path);
description.parse();
return description;
@@ -75,7 +75,7 @@ Description Description::fromFiles(const std::vector<std::string> &paths, Contex
std::for_each(paths.cbegin(), paths.cend(),
[&](const auto &path)
{
description.m_context.parser.read(path);
description.m_context.tokenizer.read(path);
});
description.parse();
@@ -126,9 +126,9 @@ const Problem &Description::problem() const
void Description::parse()
{
auto &parser = m_context.parser;
auto &tokenizer = m_context.tokenizer;
parser.removeComments(";", "\n", false);
tokenizer.removeComments(";", "\n", false);
// First, determine the locations of domain and problem
findSections();
@@ -136,12 +136,12 @@ void Description::parse()
if (m_domainPosition == -1)
throw ConsistencyException("no PDDL domain specified");
parser.seek(m_domainPosition);
tokenizer.seek(m_domainPosition);
m_domain->parse();
if (m_problemPosition != -1)
{
parser.seek(m_problemPosition);
tokenizer.seek(m_problemPosition);
m_problem->parse();
}
@@ -152,47 +152,47 @@ void Description::parse()
void Description::findSections()
{
auto &parser = m_context.parser;
auto &tokenizer = m_context.tokenizer;
parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
while (!parser.atEnd())
while (!tokenizer.atEnd())
{
const auto position = parser.position();
const auto position = tokenizer.position();
parser.expect<std::string>("(");
parser.expect<std::string>("define");
parser.expect<std::string>("(");
tokenizer.expect<std::string>("(");
tokenizer.expect<std::string>("define");
tokenizer.expect<std::string>("(");
if (parser.testAndSkip<std::string>("domain"))
if (tokenizer.testAndSkip<std::string>("domain"))
{
if (m_domainPosition != -1)
throw parsebase::ParserException(parser.location(), "PDDL description may not contain two domains");
throw tokenize::TokenizerException(tokenizer.location(), "PDDL description may not contain two domains");
m_domainPosition = position;
parser.seek(position);
tokenizer.seek(position);
m_domain->findSections();
}
else if (m_context.parser.testAndSkip<std::string>("problem"))
else if (tokenizer.testAndSkip<std::string>("problem"))
{
if (m_problemPosition != -1)
throw parsebase::ParserException(parser.location(), "PDDL description may currently not contain two problems");
throw tokenize::TokenizerException(tokenizer.location(), "PDDL description may currently not contain two problems");
m_problem = std::make_unique<Problem>(Problem(m_context, *m_domain));
m_problemPosition = position;
parser.seek(position);
tokenizer.seek(position);
m_problem->findSections();
}
else
{
const auto sectionIdentifier = parser.parse<std::string>();
throw parsebase::ParserException(parser.location(), "unknown PDDL section “" + sectionIdentifier + "”");
const auto sectionIdentifier = tokenizer.get<std::string>();
throw tokenize::TokenizerException(tokenizer.location(), "unknown PDDL section “" + sectionIdentifier + "”");
}
m_context.parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
}
}

View File

@@ -9,7 +9,7 @@
#include <plasp/pddl/expressions/PrimitiveType.h>
#include <plasp/pddl/expressions/Variable.h>
#include <parsebase/ParserException.h>
#include <tokenize/TokenizerException.h>
namespace plasp
{
@@ -35,119 +35,119 @@ Domain::Domain(Context &context)
void Domain::findSections()
{
auto &parser = m_context.parser;
auto &tokenizer = m_context.tokenizer;
parser.expect<std::string>("(");
parser.expect<std::string>("define");
parser.expect<std::string>("(");
parser.expect<std::string>("domain");
tokenizer.expect<std::string>("(");
tokenizer.expect<std::string>("define");
tokenizer.expect<std::string>("(");
tokenizer.expect<std::string>("domain");
m_name = m_context.parser.parseIdentifier();
m_name = tokenizer.getIdentifier();
parser.expect<std::string>(")");
tokenizer.expect<std::string>(")");
const auto setSectionPosition =
[&](const std::string &sectionName, auto &sectionPosition, const auto value, bool unique = false)
{
if (unique && sectionPosition != -1)
{
parser.seek(value);
throw parsebase::ParserException(parser.location(), "only one “:" + sectionName + "” section allowed");
tokenizer.seek(value);
throw tokenize::TokenizerException(tokenizer.location(), "only one “:" + sectionName + "” section allowed");
}
sectionPosition = value;
};
parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
// Find sections
while (parser.currentCharacter() != ')')
while (tokenizer.currentCharacter() != ')')
{
const auto position = parser.position();
const auto position = tokenizer.position();
parser.expect<std::string>("(");
parser.expect<std::string>(":");
tokenizer.expect<std::string>("(");
tokenizer.expect<std::string>(":");
const auto sectionIdentifierPosition = parser.position();
const auto sectionIdentifierPosition = tokenizer.position();
// Save the parser position of the individual sections for later parsing
if (parser.testIdentifierAndSkip("requirements"))
// Save the tokenizer position of the individual sections for later parsing
if (tokenizer.testIdentifierAndSkip("requirements"))
setSectionPosition("requirements", m_requirementsPosition, position, true);
else if (parser.testIdentifierAndSkip("types"))
else if (tokenizer.testIdentifierAndSkip("types"))
setSectionPosition("types", m_typesPosition, position, true);
else if (parser.testIdentifierAndSkip("constants"))
else if (tokenizer.testIdentifierAndSkip("constants"))
setSectionPosition("constants", m_constantsPosition, position, true);
else if (parser.testIdentifierAndSkip("predicates"))
else if (tokenizer.testIdentifierAndSkip("predicates"))
setSectionPosition("predicates", m_predicatesPosition, position, true);
else if (parser.testIdentifierAndSkip("action"))
else if (tokenizer.testIdentifierAndSkip("action"))
{
m_actionPositions.emplace_back(-1);
setSectionPosition("action", m_actionPositions.back(), position);
}
else if (parser.testIdentifierAndSkip("functions")
|| parser.testIdentifierAndSkip("constraints")
|| parser.testIdentifierAndSkip("durative-action")
|| parser.testIdentifierAndSkip("derived"))
else if (tokenizer.testIdentifierAndSkip("functions")
|| tokenizer.testIdentifierAndSkip("constraints")
|| tokenizer.testIdentifierAndSkip("durative-action")
|| tokenizer.testIdentifierAndSkip("derived"))
{
parser.seek(sectionIdentifierPosition);
tokenizer.seek(sectionIdentifierPosition);
const auto sectionIdentifier = parser.parseIdentifier();
const auto sectionIdentifier = tokenizer.getIdentifier();
m_context.logger.log(output::Priority::Warning, parser.location(), "section type “" + sectionIdentifier + "” currently unsupported");
m_context.logger.log(output::Priority::Warning, tokenizer.location(), "section type “" + sectionIdentifier + "” currently unsupported");
parser.seek(sectionIdentifierPosition);
tokenizer.seek(sectionIdentifierPosition);
}
else
{
const auto sectionIdentifier = parser.parseIdentifier();
const auto sectionIdentifier = tokenizer.getIdentifier();
parser.seek(position);
throw parsebase::ParserException(parser.location(), "unknown domain section “" + sectionIdentifier + "”");
tokenizer.seek(position);
throw tokenize::TokenizerException(tokenizer.location(), "unknown domain section “" + sectionIdentifier + "”");
}
// Skip section for now and parse it later
skipSection(parser);
skipSection(tokenizer);
parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
}
parser.expect<std::string>(")");
tokenizer.expect<std::string>(")");
}
////////////////////////////////////////////////////////////////////////////////////////////////////
void Domain::parse()
{
auto &parser = m_context.parser;
auto &tokenizer = m_context.tokenizer;
if (m_requirementsPosition != -1)
{
parser.seek(m_requirementsPosition);
tokenizer.seek(m_requirementsPosition);
parseRequirementSection();
}
if (m_typesPosition != -1)
{
parser.seek(m_typesPosition);
tokenizer.seek(m_typesPosition);
parseTypeSection();
}
if (m_constantsPosition != -1)
{
parser.seek(m_constantsPosition);
tokenizer.seek(m_constantsPosition);
parseConstantSection();
}
if (m_predicatesPosition != -1)
{
parser.seek(m_predicatesPosition);
tokenizer.seek(m_predicatesPosition);
parsePredicateSection();
}
for (size_t i = 0; i < m_actionPositions.size(); i++)
if (m_actionPositions[i] != -1)
{
parser.seek(m_actionPositions[i]);
tokenizer.seek(m_actionPositions[i]);
parseActionSection();
}
@@ -249,19 +249,19 @@ const expressions::DerivedPredicates &Domain::derivedPredicates() const
void Domain::parseRequirementSection()
{
auto &parser = m_context.parser;
auto &tokenizer = m_context.tokenizer;
parser.expect<std::string>("(");
parser.expect<std::string>(":");
parser.expect<std::string>("requirements");
tokenizer.expect<std::string>("(");
tokenizer.expect<std::string>(":");
tokenizer.expect<std::string>("requirements");
while (parser.currentCharacter() != ')')
while (tokenizer.currentCharacter() != ')')
{
parser.expect<std::string>(":");
tokenizer.expect<std::string>(":");
m_requirements.emplace_back(Requirement::parse(m_context));
parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
}
// TODO: do this check only once the problem is parsed
@@ -269,7 +269,7 @@ void Domain::parseRequirementSection()
if (m_requirements.empty())
m_requirements.emplace_back(Requirement::Type::STRIPS);
parser.expect<std::string>(")");
tokenizer.expect<std::string>(")");
}
////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -292,7 +292,7 @@ void Domain::checkRequirement(Requirement::Type requirementType)
if (hasRequirement(requirementType))
return;
m_context.logger.log(output::Priority::Warning, m_context.parser.location(), "requirement “" + Requirement(requirementType).toPDDL() + "” used but never declared");
m_context.logger.log(output::Priority::Warning, m_context.tokenizer.location(), "requirement “" + Requirement(requirementType).toPDDL() + "” used but never declared");
m_requirements.push_back(requirementType);
}
@@ -341,82 +341,82 @@ void Domain::computeDerivedRequirements()
void Domain::parseTypeSection()
{
auto &parser = m_context.parser;
auto &tokenizer = m_context.tokenizer;
parser.expect<std::string>("(");
parser.expect<std::string>(":");
parser.expect<std::string>("types");
tokenizer.expect<std::string>("(");
tokenizer.expect<std::string>(":");
tokenizer.expect<std::string>("types");
checkRequirement(Requirement::Type::Typing);
parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
// Store types and their parent types
while (parser.currentCharacter() != ')')
while (tokenizer.currentCharacter() != ')')
{
if (parser.currentCharacter() == '(')
throw parsebase::ParserException(parser.location(), "only primitive types are allowed in type section");
if (tokenizer.currentCharacter() == '(')
throw tokenize::TokenizerException(tokenizer.location(), "only primitive types are allowed in type section");
expressions::PrimitiveType::parseTypedDeclaration(m_context, *this);
parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
}
parser.expect<std::string>(")");
tokenizer.expect<std::string>(")");
}
////////////////////////////////////////////////////////////////////////////////////////////////////
void Domain::parseConstantSection()
{
auto &parser = m_context.parser;
auto &tokenizer = m_context.tokenizer;
parser.expect<std::string>("(");
parser.expect<std::string>(":");
parser.expect<std::string>("constants");
tokenizer.expect<std::string>("(");
tokenizer.expect<std::string>(":");
tokenizer.expect<std::string>("constants");
// Store constants
expressions::Constant::parseTypedDeclarations(m_context, *this);
parser.expect<std::string>(")");
tokenizer.expect<std::string>(")");
}
////////////////////////////////////////////////////////////////////////////////////////////////////
void Domain::parsePredicateSection()
{
auto &parser = m_context.parser;
auto &tokenizer = m_context.tokenizer;
parser.expect<std::string>("(");
parser.expect<std::string>(":");
parser.expect<std::string>("predicates");
tokenizer.expect<std::string>("(");
tokenizer.expect<std::string>(":");
tokenizer.expect<std::string>("predicates");
parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
// Store predicates and their arguments
while (parser.currentCharacter() != ')')
while (tokenizer.currentCharacter() != ')')
{
expressions::PredicateDeclaration::parse(m_context, *this);
parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
}
parser.expect<std::string>(")");
tokenizer.expect<std::string>(")");
}
////////////////////////////////////////////////////////////////////////////////////////////////////
void Domain::parseActionSection()
{
auto &parser = m_context.parser;
auto &tokenizer = m_context.tokenizer;
parser.expect<std::string>("(");
parser.expect<std::string>(":");
parser.expect<std::string>("action");
tokenizer.expect<std::string>("(");
tokenizer.expect<std::string>(":");
tokenizer.expect<std::string>("action");
Action::parseDeclaration(m_context, *this);
parser.expect<std::string>(")");
tokenizer.expect<std::string>(")");
}
////////////////////////////////////////////////////////////////////////////////////////////////////

View File

@@ -16,7 +16,7 @@
#include <plasp/pddl/expressions/Unsupported.h>
#include <plasp/pddl/expressions/When.h>
#include <parsebase/ParserException.h>
#include <tokenize/TokenizerException.h>
namespace plasp
{
@@ -100,9 +100,9 @@ ExpressionPointer parsePredicate(Context &context, ExpressionContext &expression
ExpressionPointer parsePreconditionExpression(Context &context,
ExpressionContext &expressionContext)
{
auto &parser = context.parser;
auto &tokenizer = context.tokenizer;
parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
ExpressionPointer expression;
@@ -112,23 +112,23 @@ ExpressionPointer parsePreconditionExpression(Context &context,
return expression;
}
const auto position = parser.position();
const auto position = tokenizer.position();
parser.expect<std::string>("(");
tokenizer.expect<std::string>("(");
const auto expressionIdentifierPosition = parser.position();
const auto expressionIdentifierPosition = tokenizer.position();
if (parser.testIdentifierAndSkip("preference"))
if (tokenizer.testIdentifierAndSkip("preference"))
{
// TODO: refactor
parser.seek(expressionIdentifierPosition);
const auto expressionIdentifier = parser.parseIdentifier();
tokenizer.seek(expressionIdentifierPosition);
const auto expressionIdentifier = tokenizer.getIdentifier();
parser.seek(position);
tokenizer.seek(position);
return expressions::Unsupported::parse(context);
}
parser.seek(position);
tokenizer.seek(position);
return parseExpression(context, expressionContext);
}
@@ -136,9 +136,9 @@ ExpressionPointer parsePreconditionExpression(Context &context,
ExpressionPointer parseExpression(Context &context, ExpressionContext &expressionContext)
{
auto &parser = context.parser;
auto &tokenizer = context.tokenizer;
parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
ExpressionPointer expression;
@@ -153,43 +153,43 @@ ExpressionPointer parseExpression(Context &context, ExpressionContext &expressio
return expression;
}
const auto position = parser.position();
const auto position = tokenizer.position();
parser.expect<std::string>("(");
tokenizer.expect<std::string>("(");
const auto expressionIdentifierPosition = parser.position();
const auto expressionIdentifierPosition = tokenizer.position();
if (parser.testIdentifierAndSkip("-")
|| parser.testIdentifierAndSkip("=")
|| parser.testIdentifierAndSkip("*")
|| parser.testIdentifierAndSkip("+")
|| parser.testIdentifierAndSkip("-")
|| parser.testIdentifierAndSkip("/")
|| parser.testIdentifierAndSkip(">")
|| parser.testIdentifierAndSkip("<")
|| parser.testIdentifierAndSkip("=")
|| parser.testIdentifierAndSkip(">=")
|| parser.testIdentifierAndSkip("<="))
if (tokenizer.testIdentifierAndSkip("-")
|| tokenizer.testIdentifierAndSkip("=")
|| tokenizer.testIdentifierAndSkip("*")
|| tokenizer.testIdentifierAndSkip("+")
|| tokenizer.testIdentifierAndSkip("-")
|| tokenizer.testIdentifierAndSkip("/")
|| tokenizer.testIdentifierAndSkip(">")
|| tokenizer.testIdentifierAndSkip("<")
|| tokenizer.testIdentifierAndSkip("=")
|| tokenizer.testIdentifierAndSkip(">=")
|| tokenizer.testIdentifierAndSkip("<="))
{
parser.seek(expressionIdentifierPosition);
const auto expressionIdentifier = parser.parseIdentifier();
tokenizer.seek(expressionIdentifierPosition);
const auto expressionIdentifier = tokenizer.getIdentifier();
parser.seek(position);
tokenizer.seek(position);
return expressions::Unsupported::parse(context);
}
parser.seek(expressionIdentifierPosition);
const auto expressionIdentifier = parser.parseIdentifier();
tokenizer.seek(expressionIdentifierPosition);
const auto expressionIdentifier = tokenizer.getIdentifier();
parser.seek(position);
throw parsebase::ParserException(parser.location(), "expression type “" + expressionIdentifier + "” unknown or not allowed in this context");
tokenizer.seek(position);
throw tokenize::TokenizerException(tokenizer.location(), "expression type “" + expressionIdentifier + "” unknown or not allowed in this context");
}
////////////////////////////////////////////////////////////////////////////////////////////////////
ExpressionPointer parseEffectExpression(Context &context, ExpressionContext &expressionContext)
{
auto &parser = context.parser;
auto &tokenizer = context.tokenizer;
ExpressionPointer expression;
@@ -200,22 +200,22 @@ ExpressionPointer parseEffectExpression(Context &context, ExpressionContext &exp
return expression;
}
const auto position = parser.position();
const auto position = tokenizer.position();
parser.expect<std::string>("(");
tokenizer.expect<std::string>("(");
const auto expressionIdentifierPosition = parser.position();
const auto expressionIdentifierPosition = tokenizer.position();
if (parser.testIdentifierAndSkip("when"))
if (tokenizer.testIdentifierAndSkip("when"))
{
parser.seek(expressionIdentifierPosition);
const auto expressionIdentifier = parser.parseIdentifier();
tokenizer.seek(expressionIdentifierPosition);
const auto expressionIdentifier = tokenizer.getIdentifier();
parser.seek(position);
tokenizer.seek(position);
return expressions::Unsupported::parse(context);
}
parser.seek(position);
tokenizer.seek(position);
return parseEffectBodyExpression(context, expressionContext);
}
@@ -223,7 +223,7 @@ ExpressionPointer parseEffectExpression(Context &context, ExpressionContext &exp
ExpressionPointer parseEffectBodyExpression(Context &context, ExpressionContext &expressionContext)
{
auto &parser = context.parser;
auto &tokenizer = context.tokenizer;
ExpressionPointer expression;
@@ -233,31 +233,31 @@ ExpressionPointer parseEffectBodyExpression(Context &context, ExpressionContext
return expression;
}
const auto position = parser.position();
const auto position = tokenizer.position();
parser.expect<std::string>("(");
tokenizer.expect<std::string>("(");
const auto expressionIdentifierPosition = parser.position();
const auto expressionIdentifierPosition = tokenizer.position();
if (parser.testIdentifierAndSkip("=")
|| parser.testIdentifierAndSkip("assign")
|| parser.testIdentifierAndSkip("scale-up")
|| parser.testIdentifierAndSkip("scale-down")
|| parser.testIdentifierAndSkip("increase")
|| parser.testIdentifierAndSkip("decrease"))
if (tokenizer.testIdentifierAndSkip("=")
|| tokenizer.testIdentifierAndSkip("assign")
|| tokenizer.testIdentifierAndSkip("scale-up")
|| tokenizer.testIdentifierAndSkip("scale-down")
|| tokenizer.testIdentifierAndSkip("increase")
|| tokenizer.testIdentifierAndSkip("decrease"))
{
parser.seek(expressionIdentifierPosition);
const auto expressionIdentifier = parser.parseIdentifier();
tokenizer.seek(expressionIdentifierPosition);
const auto expressionIdentifier = tokenizer.getIdentifier();
parser.seek(position);
tokenizer.seek(position);
return expressions::Unsupported::parse(context);
}
parser.seek(expressionIdentifierPosition);
const auto expressionIdentifier = parser.parseIdentifier();
tokenizer.seek(expressionIdentifierPosition);
const auto expressionIdentifier = tokenizer.getIdentifier();
parser.seek(position);
throw parsebase::ParserException(parser.location(), "expression type “" + expressionIdentifier + "” unknown or not allowed in this context");
tokenizer.seek(position);
throw tokenize::TokenizerException(tokenizer.location(), "expression type “" + expressionIdentifier + "” unknown or not allowed in this context");
}
////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -276,14 +276,12 @@ ExpressionPointer parseConditionalEffectExpression(Context &context, ExpressionC
ExpressionPointer parsePredicate(Context &context, ExpressionContext &expressionContext)
{
auto &parser = context.parser;
ExpressionPointer expression;
if ((expression = expressions::Predicate::parse(context, expressionContext)))
return expression;
throw parsebase::ParserException(parser.location(), "expected predicate");
throw tokenize::TokenizerException(context.tokenizer.location(), "expected predicate");
}
////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -305,30 +303,30 @@ ExpressionPointer parseLiteral(Context &context, ExpressionContext &expressionCo
ExpressionPointer parseAtomicFormula(Context &context, ExpressionContext &expressionContext)
{
auto &parser = context.parser;
auto &tokenizer = context.tokenizer;
ExpressionPointer expression;
if ((expression = expressions::Predicate::parse(context, expressionContext)))
return expression;
const auto position = parser.position();
const auto position = tokenizer.position();
if (!parser.testAndSkip<std::string>("("))
if (!tokenizer.testAndSkip<std::string>("("))
return nullptr;
const auto expressionIdentifierPosition = parser.position();
const auto expressionIdentifierPosition = tokenizer.position();
if (parser.testIdentifierAndSkip("="))
if (tokenizer.testIdentifierAndSkip("="))
{
parser.seek(expressionIdentifierPosition);
const auto expressionIdentifier = parser.parseIdentifier();
tokenizer.seek(expressionIdentifierPosition);
const auto expressionIdentifier = tokenizer.getIdentifier();
parser.seek(position);
tokenizer.seek(position);
return expressions::Unsupported::parse(context);
}
parser.seek(position);
tokenizer.seek(position);
return nullptr;
}

View File

@@ -8,7 +8,7 @@
#include <plasp/pddl/expressions/Predicate.h>
#include <plasp/pddl/expressions/Unsupported.h>
#include <parsebase/ParserException.h>
#include <tokenize/TokenizerException.h>
namespace plasp
{
@@ -24,7 +24,7 @@ namespace pddl
std::unique_ptr<InitialState> InitialState::parseDeclaration(Context &context,
ExpressionContext &expressionContext)
{
auto &parser = context.parser;
auto &tokenizer = context.tokenizer;
auto initialState = std::make_unique<InitialState>(InitialState());
@@ -40,38 +40,38 @@ std::unique_ptr<InitialState> InitialState::parseDeclaration(Context &context,
return expression;
}
const auto position = parser.position();
const auto position = tokenizer.position();
parser.expect<std::string>("(");
tokenizer.expect<std::string>("(");
const auto expressionIdentifierPosition = parser.position();
const auto expressionIdentifierPosition = tokenizer.position();
if (parser.testIdentifierAndSkip("="))
if (tokenizer.testIdentifierAndSkip("="))
{
parser.seek(expressionIdentifierPosition);
const auto expressionIdentifier = parser.parseIdentifier();
tokenizer.seek(expressionIdentifierPosition);
const auto expressionIdentifier = tokenizer.getIdentifier();
parser.seek(position);
tokenizer.seek(position);
return expressions::Unsupported::parse(context);
}
parser.seek(expressionIdentifierPosition);
const auto expressionIdentifier = parser.parseIdentifier();
tokenizer.seek(expressionIdentifierPosition);
const auto expressionIdentifier = tokenizer.getIdentifier();
parser.seek(position);
throw parsebase::ParserException(parser.location(), "expression type “" + expressionIdentifier + "” unknown or not allowed in this context");
tokenizer.seek(position);
throw tokenize::TokenizerException(tokenizer.location(), "expression type “" + expressionIdentifier + "” unknown or not allowed in this context");
};
parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
while (parser.currentCharacter() != ')')
while (tokenizer.currentCharacter() != ')')
{
ExpressionPointer expression;
if ((expression = parseInitialStateElement()))
initialState->m_facts.emplace_back(std::move(expression));
parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
}
return initialState;

View File

@@ -8,7 +8,7 @@
#include <plasp/pddl/IO.h>
#include <plasp/pddl/expressions/Constant.h>
#include <parsebase/ParserException.h>
#include <tokenize/TokenizerException.h>
namespace plasp
{
@@ -36,114 +36,114 @@ Problem::Problem(Context &context, Domain &domain)
void Problem::findSections()
{
auto &parser = m_context.parser;
auto &tokenizer = m_context.tokenizer;
parser.expect<std::string>("(");
parser.expect<std::string>("define");
parser.expect<std::string>("(");
parser.expect<std::string>("problem");
tokenizer.expect<std::string>("(");
tokenizer.expect<std::string>("define");
tokenizer.expect<std::string>("(");
tokenizer.expect<std::string>("problem");
m_name = parser.parseIdentifier();
m_name = tokenizer.getIdentifier();
parser.expect<std::string>(")");
tokenizer.expect<std::string>(")");
const auto setSectionPosition =
[&](const std::string &sectionName, auto &sectionPosition, const auto value, bool unique = false)
{
if (unique && sectionPosition != -1)
{
parser.seek(value);
throw parsebase::ParserException(parser.location(), "only one “:" + sectionName + "” section allowed");
tokenizer.seek(value);
throw tokenize::TokenizerException(tokenizer.location(), "only one “:" + sectionName + "” section allowed");
}
sectionPosition = value;
};
parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
while (parser.currentCharacter() != ')')
while (tokenizer.currentCharacter() != ')')
{
const auto position = parser.position();
const auto position = tokenizer.position();
parser.expect<std::string>("(");
parser.expect<std::string>(":");
tokenizer.expect<std::string>("(");
tokenizer.expect<std::string>(":");
const auto sectionIdentifierPosition = parser.position();
const auto sectionIdentifierPosition = tokenizer.position();
// TODO: check order of the sections
if (parser.testIdentifierAndSkip("domain"))
if (tokenizer.testIdentifierAndSkip("domain"))
setSectionPosition("domain", m_domainPosition, position, true);
else if (parser.testIdentifierAndSkip("requirements"))
else if (tokenizer.testIdentifierAndSkip("requirements"))
setSectionPosition("requirements", m_requirementsPosition, position, true);
else if (parser.testIdentifierAndSkip("objects"))
else if (tokenizer.testIdentifierAndSkip("objects"))
setSectionPosition("objects", m_objectsPosition, position, true);
else if (parser.testIdentifierAndSkip("init"))
else if (tokenizer.testIdentifierAndSkip("init"))
setSectionPosition("init", m_initialStatePosition, position, true);
else if (parser.testIdentifierAndSkip("goal"))
else if (tokenizer.testIdentifierAndSkip("goal"))
setSectionPosition("goal", m_goalPosition, position, true);
else if (parser.testIdentifierAndSkip("constraints")
|| parser.testIdentifierAndSkip("metric")
|| parser.testIdentifierAndSkip("length"))
else if (tokenizer.testIdentifierAndSkip("constraints")
|| tokenizer.testIdentifierAndSkip("metric")
|| tokenizer.testIdentifierAndSkip("length"))
{
parser.seek(sectionIdentifierPosition);
tokenizer.seek(sectionIdentifierPosition);
const auto sectionIdentifier = parser.parseIdentifier();
const auto sectionIdentifier = tokenizer.getIdentifier();
m_context.logger.log(output::Priority::Warning, parser.location(), "section type “" + sectionIdentifier + "” currently unsupported");
m_context.logger.log(output::Priority::Warning, tokenizer.location(), "section type “" + sectionIdentifier + "” currently unsupported");
parser.seek(sectionIdentifierPosition);
tokenizer.seek(sectionIdentifierPosition);
}
else
{
const auto sectionIdentifier = parser.parseIdentifier();
const auto sectionIdentifier = tokenizer.getIdentifier();
parser.seek(position);
throw parsebase::ParserException(parser.location(), "unknown problem section “" + sectionIdentifier + "”");
tokenizer.seek(position);
throw tokenize::TokenizerException(tokenizer.location(), "unknown problem section “" + sectionIdentifier + "”");
}
// Skip section for now and parse it later
skipSection(parser);
skipSection(tokenizer);
parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
}
parser.expect<std::string>(")");
tokenizer.expect<std::string>(")");
}
////////////////////////////////////////////////////////////////////////////////////////////////////
void Problem::parse()
{
auto &parser = m_context.parser;
auto &tokenizer = m_context.tokenizer;
if (m_domainPosition == -1)
throw ConsistencyException("problem description does not specify the corresponding domain");
parser.seek(m_domainPosition);
tokenizer.seek(m_domainPosition);
parseDomainSection();
if (m_requirementsPosition != -1)
{
parser.seek(m_requirementsPosition);
tokenizer.seek(m_requirementsPosition);
parseRequirementSection();
}
if (m_objectsPosition != -1)
{
parser.seek(m_objectsPosition);
tokenizer.seek(m_objectsPosition);
parseObjectSection();
}
if (m_initialStatePosition == -1)
throw ConsistencyException("problem description does not specify an initial state");
parser.seek(m_initialStatePosition);
tokenizer.seek(m_initialStatePosition);
parseInitialStateSection();
if (m_goalPosition == -1)
throw ConsistencyException("problem description does not specify a goal");
parser.seek(m_goalPosition);
tokenizer.seek(m_goalPosition);
parseGoalSection();
}
@@ -193,41 +193,41 @@ const expressions::Constants &Problem::objects() const
void Problem::parseDomainSection()
{
auto &parser = m_context.parser;
auto &tokenizer = m_context.tokenizer;
parser.expect<std::string>("(");
parser.expect<std::string>(":");
parser.expect<std::string>("domain");
tokenizer.expect<std::string>("(");
tokenizer.expect<std::string>(":");
tokenizer.expect<std::string>("domain");
parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
const auto domainName = parser.parseIdentifier();
const auto domainName = tokenizer.getIdentifier();
if (m_domain.name() != domainName)
throw parsebase::ParserException(parser.location(), "domains do not match (“" + m_domain.name() + "” and “" + domainName + "”)");
throw tokenize::TokenizerException(tokenizer.location(), "domains do not match (“" + m_domain.name() + "” and “" + domainName + "”)");
parser.expect<std::string>(")");
tokenizer.expect<std::string>(")");
}
////////////////////////////////////////////////////////////////////////////////////////////////////
void Problem::parseRequirementSection()
{
auto &parser = m_context.parser;
auto &tokenizer = m_context.tokenizer;
parser.expect<std::string>("(");
parser.expect<std::string>(":");
parser.expect<std::string>("requirements");
tokenizer.expect<std::string>("(");
tokenizer.expect<std::string>(":");
tokenizer.expect<std::string>("requirements");
parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
while (parser.currentCharacter() != ')')
while (tokenizer.currentCharacter() != ')')
{
parser.expect<std::string>(":");
tokenizer.expect<std::string>(":");
m_requirements.emplace_back(Requirement::parse(m_context));
parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
}
// TODO: do this check only once the domain is parsed
@@ -235,7 +235,7 @@ void Problem::parseRequirementSection()
if (m_requirements.empty())
m_requirements.emplace_back(Requirement::Type::STRIPS);
parser.expect<std::string>(")");
tokenizer.expect<std::string>(")");
}
////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -261,7 +261,7 @@ void Problem::checkRequirement(Requirement::Type requirementType)
if (hasRequirement(requirementType))
return;
m_context.logger.log(output::Priority::Warning, m_context.parser.location(), "requirement “" + Requirement(requirementType).toPDDL() + "” used but never declared");
m_context.logger.log(output::Priority::Warning, m_context.tokenizer.location(), "requirement “" + Requirement(requirementType).toPDDL() + "” used but never declared");
m_requirements.push_back(requirementType);
}
@@ -310,52 +310,52 @@ void Problem::computeDerivedRequirements()
void Problem::parseObjectSection()
{
auto &parser = m_context.parser;
auto &tokenizer = m_context.tokenizer;
parser.expect<std::string>("(");
parser.expect<std::string>(":");
parser.expect<std::string>("objects");
tokenizer.expect<std::string>("(");
tokenizer.expect<std::string>(":");
tokenizer.expect<std::string>("objects");
parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
// Store constants
expressions::Constant::parseTypedDeclarations(m_context, *this);
parser.expect<std::string>(")");
tokenizer.expect<std::string>(")");
}
////////////////////////////////////////////////////////////////////////////////////////////////////
void Problem::parseInitialStateSection()
{
auto &parser = m_context.parser;
auto &tokenizer = m_context.tokenizer;
parser.expect<std::string>("(");
parser.expect<std::string>(":");
parser.expect<std::string>("init");
tokenizer.expect<std::string>("(");
tokenizer.expect<std::string>(":");
tokenizer.expect<std::string>("init");
ExpressionContext expressionContext(m_domain, this);
m_initialState = InitialState::parseDeclaration(m_context, expressionContext);
parser.expect<std::string>(")");
tokenizer.expect<std::string>(")");
}
////////////////////////////////////////////////////////////////////////////////////////////////////
void Problem::parseGoalSection()
{
auto &parser = m_context.parser;
auto &tokenizer = m_context.tokenizer;
parser.expect<std::string>("(");
parser.expect<std::string>(":");
parser.expect<std::string>("goal");
tokenizer.expect<std::string>("(");
tokenizer.expect<std::string>(":");
tokenizer.expect<std::string>("goal");
ExpressionContext expressionContext(m_domain, this);
m_goal = parsePreconditionExpression(m_context, expressionContext);
parser.expect<std::string>(")");
tokenizer.expect<std::string>(")");
}
////////////////////////////////////////////////////////////////////////////////////////////////////

View File

@@ -4,7 +4,7 @@
#include <boost/assign.hpp>
#include <boost/bimap.hpp>
#include <parsebase/ParserException.h>
#include <tokenize/TokenizerException.h>
namespace plasp
{
@@ -82,19 +82,19 @@ Requirement::Requirement(Requirement::Type type)
Requirement Requirement::parse(Context &context)
{
auto &parser = context.parser;
auto &tokenizer = context.tokenizer;
const auto requirementName = parser.parseIdentifier();
const auto requirementName = tokenizer.getIdentifier();
const auto match = requirementTypesToPDDL.right.find(requirementName);
if (match == requirementTypesToPDDL.right.end())
throw parsebase::ParserException(parser.location(), "unknown PDDL requirement “" + requirementName + "");
throw tokenize::TokenizerException(tokenizer.location(), "unknown PDDL requirement “" + requirementName + "");
const auto requirementType = match->second;
if (requirementType == Requirement::Type::GoalUtilities)
context.logger.log(output::Priority::Warning, parser.location(), "requirement “goal-utilities” is not part of the PDDL 3.1 specification");
context.logger.log(output::Priority::Warning, tokenizer.location(), "requirement “goal-utilities” is not part of the PDDL 3.1 specification");
return Requirement(match->second);
}

View File

@@ -29,13 +29,13 @@ void VariableStack::pop()
expressions::VariablePointer VariableStack::parseAndFind(plasp::pddl::Context &context)
{
auto &parser = context.parser;
auto &tokenizer = context.tokenizer;
parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
parser.expect<std::string>("?");
tokenizer.expect<std::string>("?");
const auto variableName = parser.parseIdentifier();
const auto variableName = tokenizer.getIdentifier();
for (auto i = m_variableStack.crbegin(); i != m_variableStack.crend(); i++)
{
@@ -53,7 +53,7 @@ expressions::VariablePointer VariableStack::parseAndFind(plasp::pddl::Context &c
return match->get();
}
throw parsebase::ParserException(parser.location(), "variable “" + variableName + "” used but never declared");
throw tokenize::TokenizerException(tokenizer.location(), "variable “" + variableName + "” used but never declared");
}
////////////////////////////////////////////////////////////////////////////////////////////////////

View File

@@ -10,7 +10,7 @@
#include <plasp/pddl/Problem.h>
#include <plasp/pddl/expressions/PrimitiveType.h>
#include <parsebase/ParserException.h>
#include <tokenize/TokenizerException.h>
namespace plasp
{
@@ -35,11 +35,11 @@ Constant::Constant()
ConstantPointer Constant::parseDeclaration(Context &context)
{
context.parser.skipWhiteSpace();
context.tokenizer.skipWhiteSpace();
auto constant = ConstantPointer(new Constant);
constant->m_name = context.parser.parseIdentifier();
constant->m_name = context.tokenizer.getIdentifier();
BOOST_ASSERT(constant->m_name != "-");
@@ -70,10 +70,10 @@ void Constant::parseTypedDeclaration(Context &context, Domain &domain, Constants
// Parse and store constant
constants.emplace_back(parseDeclaration(context));
context.parser.skipWhiteSpace();
context.tokenizer.skipWhiteSpace();
// Check for typing information
if (!context.parser.testAndSkip<char>('-'))
if (!context.tokenizer.testAndSkip<char>('-'))
return;
// If existing, parse and store parent type
@@ -95,13 +95,13 @@ void Constant::parseTypedDeclaration(Context &context, Domain &domain, Constants
void Constant::parseTypedDeclarations(Context &context, Domain &domain)
{
auto &parser = context.parser;
auto &tokenizer = context.tokenizer;
while (parser.currentCharacter() != ')')
while (tokenizer.currentCharacter() != ')')
{
parseTypedDeclaration(context, domain);
parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
}
if (domain.constants().empty())
@@ -115,20 +115,20 @@ void Constant::parseTypedDeclarations(Context &context, Domain &domain)
domain.checkRequirement(Requirement::Type::Typing);
// If no types are given, check that typing is not a requirement
else if (domain.hasRequirement(Requirement::Type::Typing))
throw parsebase::ParserException(parser.location(), "constant has undeclared type");
throw tokenize::TokenizerException(tokenizer.location(), "constant has undeclared type");
}
////////////////////////////////////////////////////////////////////////////////////////////////////
void Constant::parseTypedDeclarations(Context &context, Problem &problem)
{
auto &parser = context.parser;
auto &tokenizer = context.tokenizer;
while (context.parser.currentCharacter() != ')')
while (tokenizer.currentCharacter() != ')')
{
parseTypedDeclaration(context, problem);
parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
}
if (problem.objects().empty())
@@ -142,36 +142,36 @@ void Constant::parseTypedDeclarations(Context &context, Problem &problem)
problem.checkRequirement(Requirement::Type::Typing);
// If no types are given, check that typing is not a requirement
else if (problem.hasRequirement(Requirement::Type::Typing))
throw parsebase::ParserException(parser.location(), "constant has undeclared type");
throw tokenize::TokenizerException(tokenizer.location(), "constant has undeclared type");
}
////////////////////////////////////////////////////////////////////////////////////////////////////
ConstantPointer Constant::parseAndFind(Context &context, const Domain &domain)
{
auto &parser = context.parser;
auto &tokenizer = context.tokenizer;
parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
const auto constantName = parser.parseIdentifier();
const auto constantName = tokenizer.getIdentifier();
auto constant = parseAndFind(constantName, domain.constants());
if (constant != nullptr)
return constant;
throw parsebase::ParserException(parser.location(), "constant “" + constantName + "” used but never declared");
throw tokenize::TokenizerException(tokenizer.location(), "constant “" + constantName + "” used but never declared");
}
////////////////////////////////////////////////////////////////////////////////////////////////////
ConstantPointer Constant::parseAndFind(Context &context, const Problem &problem)
{
auto &parser = context.parser;
auto &tokenizer = context.tokenizer;
parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
const auto constantName = parser.parseIdentifier();
const auto constantName = tokenizer.getIdentifier();
auto constant = parseAndFind(constantName, problem.domain().constants());
@@ -183,7 +183,7 @@ ConstantPointer Constant::parseAndFind(Context &context, const Problem &problem)
if (constant)
return constant;
throw parsebase::ParserException(parser.location(), "constant “" + constantName + "” used but never declared");
throw tokenize::TokenizerException(tokenizer.location(), "constant “" + constantName + "” used but never declared");
}
////////////////////////////////////////////////////////////////////////////////////////////////////

View File

@@ -29,17 +29,17 @@ Predicate::Predicate()
PredicatePointer Predicate::parse(Context &context, ExpressionContext &expressionContext)
{
auto &parser = context.parser;
auto &tokenizer = context.tokenizer;
const auto position = parser.position();
const auto position = tokenizer.position();
if (!parser.testAndSkip<std::string>("("))
if (!tokenizer.testAndSkip<std::string>("("))
{
parser.seek(position);
tokenizer.seek(position);
return nullptr;
}
const auto predicateName = parser.parseIdentifier();
const auto predicateName = tokenizer.getIdentifier();
const auto &predicates = expressionContext.domain.predicates();
const auto matchingPredicate = std::find_if(predicates.cbegin(), predicates.cend(),
@@ -50,7 +50,7 @@ PredicatePointer Predicate::parse(Context &context, ExpressionContext &expressio
if (matchingPredicate == predicates.cend())
{
parser.seek(position);
tokenizer.seek(position);
return nullptr;
}
@@ -58,13 +58,13 @@ PredicatePointer Predicate::parse(Context &context, ExpressionContext &expressio
predicate->m_name = predicateName;
context.parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
// Parse arguments
while (context.parser.currentCharacter() != ')')
while (tokenizer.currentCharacter() != ')')
{
// Parse variables
if (context.parser.currentCharacter() == '?')
if (tokenizer.currentCharacter() == '?')
{
const auto variable = expressionContext.variables.parseAndFind(context);
predicate->m_arguments.emplace_back(variable);
@@ -78,12 +78,12 @@ PredicatePointer Predicate::parse(Context &context, ExpressionContext &expressio
predicate->m_arguments.emplace_back(constant);
}
context.parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
}
// TODO: check that signature matches one of the declared ones
parser.expect<std::string>(")");
tokenizer.expect<std::string>(")");
return predicate;
}
@@ -92,17 +92,17 @@ PredicatePointer Predicate::parse(Context &context, ExpressionContext &expressio
PredicatePointer Predicate::parse(Context &context, const Problem &problem)
{
auto &parser = context.parser;
auto &tokenizer = context.tokenizer;
const auto position = parser.position();
const auto position = tokenizer.position();
if (!parser.testAndSkip<std::string>("("))
if (!tokenizer.testAndSkip<std::string>("("))
{
parser.seek(position);
tokenizer.seek(position);
return nullptr;
}
const auto predicateName = parser.parseIdentifier();
const auto predicateName = tokenizer.getIdentifier();
const auto &predicates = problem.domain().predicates();
const auto matchingPredicate = std::find_if(predicates.cbegin(), predicates.cend(),
@@ -113,7 +113,7 @@ PredicatePointer Predicate::parse(Context &context, const Problem &problem)
if (matchingPredicate == predicates.cend())
{
parser.seek(position);
tokenizer.seek(position);
return nullptr;
}
@@ -121,12 +121,12 @@ PredicatePointer Predicate::parse(Context &context, const Problem &problem)
predicate->m_name = predicateName;
parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
while (parser.currentCharacter() != ')')
while (tokenizer.currentCharacter() != ')')
{
if (parser.currentCharacter() == '?')
throw parsebase::ParserException(parser.location(), "variables not allowed in this context");
if (tokenizer.currentCharacter() == '?')
throw tokenize::TokenizerException(tokenizer.location(), "variables not allowed in this context");
// Parse objects and constants
const auto constant = Constant::parseAndFind(context, problem);
@@ -135,7 +135,7 @@ PredicatePointer Predicate::parse(Context &context, const Problem &problem)
// TODO: check that signature matches one of the declared ones
parser.expect<std::string>(")");
tokenizer.expect<std::string>(")");
return predicate;
}

View File

@@ -28,16 +28,18 @@ PredicateDeclaration::PredicateDeclaration()
void PredicateDeclaration::parse(Context &context, Domain &domain)
{
context.parser.expect<std::string>("(");
auto &tokenizer = context.tokenizer;
tokenizer.expect<std::string>("(");
auto predicate = PredicateDeclarationPointer(new PredicateDeclaration);
predicate->m_name = context.parser.parseIdentifier();
predicate->m_name = tokenizer.getIdentifier();
// Flag predicate as correctly declared in the types section
predicate->setDeclared();
context.parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
ExpressionContext expressionContext(domain);
expressionContext.variables.push(&predicate->m_parameters);
@@ -45,7 +47,7 @@ void PredicateDeclaration::parse(Context &context, Domain &domain)
// Parse parameters
Variable::parseTypedDeclarations(context, expressionContext, predicate->m_parameters);
context.parser.expect<std::string>(")");
tokenizer.expect<std::string>(")");
domain.predicates().emplace_back(std::move(predicate));
}

View File

@@ -8,7 +8,7 @@
#include <plasp/pddl/Domain.h>
#include <plasp/pddl/ExpressionContext.h>
#include <parsebase/ParserException.h>
#include <tokenize/TokenizerException.h>
namespace plasp
{
@@ -43,9 +43,9 @@ void PrimitiveType::parseDeclaration(Context &context, Domain &domain)
{
auto &types = domain.types();
context.parser.skipWhiteSpace();
context.tokenizer.skipWhiteSpace();
const auto typeName = context.parser.parseIdentifier();
const auto typeName = context.tokenizer.getIdentifier();
const auto match = std::find_if(types.cbegin(), types.cend(),
[&](const auto &primitiveType)
@@ -75,10 +75,10 @@ void PrimitiveType::parseTypedDeclaration(Context &context, Domain &domain)
// Parse and store type
parseDeclaration(context, domain);
context.parser.skipWhiteSpace();
context.tokenizer.skipWhiteSpace();
// Check for type inheritance
if (!context.parser.testAndSkip<char>('-'))
if (!context.tokenizer.testAndSkip<char>('-'))
return;
domain.checkRequirement(Requirement::Type::Typing);
@@ -104,16 +104,16 @@ void PrimitiveType::parseTypedDeclaration(Context &context, Domain &domain)
PrimitiveTypePointer PrimitiveType::parseAndFind(Context &context, Domain &domain)
{
auto &parser = context.parser;
auto &tokenizer = context.tokenizer;
auto &types = domain.types();
parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
const auto typeName = parser.parseIdentifier();
const auto typeName = tokenizer.getIdentifier();
if (typeName.empty())
throw parsebase::ParserException(parser.location(), "no type supplied");
throw tokenize::TokenizerException(tokenizer.location(), "no type supplied");
const auto match = std::find_if(types.cbegin(), types.cend(),
[&](const auto &primitiveType)
@@ -126,11 +126,11 @@ PrimitiveTypePointer PrimitiveType::parseAndFind(Context &context, Domain &domai
// Only "object" is allowed as an implicit type
if (typeName == "object" || typeName == "objects")
{
context.logger.log(output::Priority::Warning, parser.location(), "primitive type “" + typeName + "” should be declared");
context.logger.log(output::Priority::Warning, tokenizer.location(), "primitive type “" + typeName + "” should be declared");
types.emplace_back(PrimitiveTypePointer(new PrimitiveType(typeName)));
}
else
throw parsebase::ParserException(parser.location(), "type “" + typeName + "” used but never declared");
throw tokenize::TokenizerException(tokenizer.location(), "type “" + typeName + "” used but never declared");
return types.back().get();
}

View File

@@ -17,17 +17,17 @@ namespace expressions
UnsupportedPointer Unsupported::parse(Context &context)
{
auto &parser = context.parser;
auto &tokenizer = context.tokenizer;
auto expression = UnsupportedPointer(new Unsupported);
parser.expect<std::string>("(");
tokenizer.expect<std::string>("(");
expression->m_type = parser.parseIdentifier();
expression->m_type = tokenizer.getIdentifier();
context.logger.log(output::Priority::Warning, parser.location(), "expression type “" + expression->m_type + "” currently unsupported in this context");
context.logger.log(output::Priority::Warning, tokenizer.location(), "expression type “" + expression->m_type + "” currently unsupported in this context");
skipSection(parser);
skipSection(tokenizer);
return expression;
}

View File

@@ -11,7 +11,7 @@
#include <plasp/pddl/expressions/PrimitiveType.h>
#include <plasp/pddl/expressions/Type.h>
#include <parsebase/ParserException.h>
#include <tokenize/TokenizerException.h>
namespace plasp
{
@@ -43,15 +43,15 @@ Variable::Variable(std::string name)
void Variable::parseDeclaration(Context &context, Variables &parameters)
{
auto &parser = context.parser;
auto &tokenizer = context.tokenizer;
parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
parser.expect<std::string>("?");
tokenizer.expect<std::string>("?");
auto variable = VariablePointer(new Variable);
variable->m_name = parser.parseIdentifier();
variable->m_name = tokenizer.getIdentifier();
// Check if variable of that name already exists in the current scope
const auto match = std::find_if(parameters.cbegin(), parameters.cend(),
@@ -61,7 +61,7 @@ void Variable::parseDeclaration(Context &context, Variables &parameters)
});
if (match != parameters.cend())
throw parsebase::ParserException(parser.location(), "variable “" + variable->m_name + "” already declared in this scope");
throw tokenize::TokenizerException(tokenizer.location(), "variable “" + variable->m_name + "” already declared in this scope");
// Flag variable for potentially upcoming type declaration
variable->setDirty();
@@ -74,17 +74,17 @@ void Variable::parseDeclaration(Context &context, Variables &parameters)
void Variable::parseTypedDeclaration(Context &context, ExpressionContext &expressionContext,
Variables &variables)
{
auto &parser = context.parser;
auto &tokenizer = context.tokenizer;
// Parse and store variable itself
parseDeclaration(context, variables);
auto variable = variables.back();
parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
// Check if the variable has a type declaration
if (!parser.testAndSkip<char>('-'))
if (!tokenizer.testAndSkip<char>('-'))
return;
const auto setType =
@@ -101,7 +101,7 @@ void Variable::parseTypedDeclaration(Context &context, ExpressionContext &expres
}
};
parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
// Parse argument if it has "either" type (always begins with opening parenthesis)
variable->m_type = Either::parse(context, expressionContext, parseExistingPrimitiveType);
@@ -118,13 +118,13 @@ void Variable::parseTypedDeclaration(Context &context, ExpressionContext &expres
void Variable::parseTypedDeclarations(Context &context, ExpressionContext &expressionContext,
Variables &variables)
{
auto &parser = context.parser;
auto &tokenizer = context.tokenizer;
while (parser.currentCharacter() != ')')
while (tokenizer.currentCharacter() != ')')
{
parseTypedDeclaration(context, expressionContext, variables);
parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
}
if (variables.empty())
@@ -138,7 +138,7 @@ void Variable::parseTypedDeclarations(Context &context, ExpressionContext &expre
expressionContext.checkRequirement(Requirement::Type::Typing);
// If no types are given, check that typing is not a requirement
else if (expressionContext.hasRequirement(Requirement::Type::Typing))
throw parsebase::ParserException(parser.location(), "variable has undeclared type");
throw tokenize::TokenizerException(tokenizer.location(), "variable has undeclared type");
}
////////////////////////////////////////////////////////////////////////////////////////////////////

View File

@@ -31,24 +31,24 @@ AssignedVariable::AssignedVariable(const Variable &variable, const Value &value)
////////////////////////////////////////////////////////////////////////////////////////////////////
AssignedVariable AssignedVariable::fromSAS(parsebase::Parser<> &parser, const Variables &variables)
AssignedVariable AssignedVariable::fromSAS(tokenize::Tokenizer<> &tokenizer, const Variables &variables)
{
AssignedVariable assignedVariable;
assignedVariable.m_variable = &Variable::referenceFromSAS(parser, variables);
assignedVariable.m_value = &Value::referenceFromSAS(parser, *assignedVariable.m_variable);
assignedVariable.m_variable = &Variable::referenceFromSAS(tokenizer, variables);
assignedVariable.m_value = &Value::referenceFromSAS(tokenizer, *assignedVariable.m_variable);
return assignedVariable;
}
////////////////////////////////////////////////////////////////////////////////////////////////////
AssignedVariable AssignedVariable::fromSAS(parsebase::Parser<> &parser, const Variable &variable)
AssignedVariable AssignedVariable::fromSAS(tokenize::Tokenizer<> &tokenizer, const Variable &variable)
{
AssignedVariable assignedVariable;
assignedVariable.m_variable = &variable;
assignedVariable.m_value = &Value::referenceFromSAS(parser, *assignedVariable.m_variable);
assignedVariable.m_value = &Value::referenceFromSAS(tokenizer, *assignedVariable.m_variable);
return assignedVariable;
}

View File

@@ -23,24 +23,24 @@ AxiomRule::AxiomRule(AxiomRule::Conditions conditions, AxiomRule::Condition post
////////////////////////////////////////////////////////////////////////////////////////////////////
AxiomRule AxiomRule::fromSAS(parsebase::Parser<> &parser, const Variables &variables)
AxiomRule AxiomRule::fromSAS(tokenize::Tokenizer<> &tokenizer, const Variables &variables)
{
parser.expect<std::string>("begin_rule");
tokenizer.expect<std::string>("begin_rule");
const auto numberOfConditions = parser.parse<size_t>();
const auto numberOfConditions = tokenizer.get<size_t>();
Conditions conditions;
conditions.reserve(numberOfConditions);
for (size_t j = 0; j < numberOfConditions; j++)
conditions.emplace_back(Condition::fromSAS(parser, variables));
conditions.emplace_back(Condition::fromSAS(tokenizer, variables));
const auto variableTransition = VariableTransition::fromSAS(parser, variables);
const auto variableTransition = VariableTransition::fromSAS(tokenizer, variables);
if (&variableTransition.valueBefore() != &Value::Any)
conditions.emplace_back(Condition(variableTransition.variable(), variableTransition.valueBefore()));
parser.expect<std::string>("end_rule");
tokenizer.expect<std::string>("end_rule");
const Condition postcondition(variableTransition.variable(), variableTransition.valueAfter());
const AxiomRule axiomRule(std::move(conditions), std::move(postcondition));

View File

@@ -7,7 +7,7 @@
#include <plasp/sas/VariableTransition.h>
#include <parsebase/ParserException.h>
#include <tokenize/TokenizerException.h>
namespace plasp
{
@@ -27,10 +27,10 @@ Description::Description()
////////////////////////////////////////////////////////////////////////////////////////////////////
Description Description::fromParser(parsebase::Parser<> &&parser)
Description Description::fromTokenizer(tokenize::Tokenizer<> &&tokenizer)
{
Description description;
description.parseContent(parser);
description.parseContent(tokenizer);
return description;
}
@@ -39,11 +39,11 @@ Description Description::fromParser(parsebase::Parser<> &&parser)
Description Description::fromStream(std::istream &istream)
{
parsebase::Parser<> parser;
parser.read("std::cin", istream);
tokenize::Tokenizer<> tokenizer;
tokenizer.read("std::cin", istream);
Description description;
description.parseContent(parser);
description.parseContent(tokenizer);
return description;
}
@@ -55,11 +55,11 @@ Description Description::fromFile(const std::experimental::filesystem::path &pat
if (!std::experimental::filesystem::is_regular_file(path))
throw std::runtime_error("File does not exist: “" + path.string() + "");
parsebase::Parser<> parser;
parser.read(path);
tokenize::Tokenizer<> tokenizer;
tokenizer.read(path);
Description description;
description.parseContent(parser);
description.parseContent(tokenizer);
return description;
}
@@ -159,104 +159,104 @@ bool Description::hasRequirements() const
////////////////////////////////////////////////////////////////////////////////////////////////////
void Description::parseContent(parsebase::Parser<> &parser)
void Description::parseContent(tokenize::Tokenizer<> &tokenizer)
{
parseVersionSection(parser);
parseMetricSection(parser);
parseVariablesSection(parser);
parseMutexSection(parser);
parseInitialStateSection(parser);
parseGoalSection(parser);
parseOperatorSection(parser);
parseAxiomSection(parser);
parseVersionSection(tokenizer);
parseMetricSection(tokenizer);
parseVariablesSection(tokenizer);
parseMutexSection(tokenizer);
parseInitialStateSection(tokenizer);
parseGoalSection(tokenizer);
parseOperatorSection(tokenizer);
parseAxiomSection(tokenizer);
parser.skipWhiteSpace();
tokenizer.skipWhiteSpace();
if (!parser.atEnd())
throw parsebase::ParserException(parser.location(), "expected end of SAS description (perhaps, input contains two SAS descriptions?)");
if (!tokenizer.atEnd())
throw tokenize::TokenizerException(tokenizer.location(), "expected end of SAS description (perhaps, input contains two SAS descriptions?)");
}
////////////////////////////////////////////////////////////////////////////////////////////////////
void Description::parseVersionSection(parsebase::Parser<> &parser) const
void Description::parseVersionSection(tokenize::Tokenizer<> &tokenizer) const
{
parser.expect<std::string>("begin_version");
tokenizer.expect<std::string>("begin_version");
const auto formatVersion = parser.parse<size_t>();
const auto formatVersion = tokenizer.get<size_t>();
if (formatVersion != 3)
throw parsebase::ParserException(parser.location(), "unsupported SAS format version (" + std::to_string(formatVersion) + ")");
throw tokenize::TokenizerException(tokenizer.location(), "unsupported SAS format version (" + std::to_string(formatVersion) + ")");
parser.expect<std::string>("end_version");
tokenizer.expect<std::string>("end_version");
}
////////////////////////////////////////////////////////////////////////////////////////////////////
void Description::parseMetricSection(parsebase::Parser<> &parser)
void Description::parseMetricSection(tokenize::Tokenizer<> &tokenizer)
{
parser.expect<std::string>("begin_metric");
tokenizer.expect<std::string>("begin_metric");
m_usesActionCosts = parser.parse<bool>();
m_usesActionCosts = tokenizer.get<bool>();
parser.expect<std::string>("end_metric");
tokenizer.expect<std::string>("end_metric");
}
////////////////////////////////////////////////////////////////////////////////////////////////////
void Description::parseVariablesSection(parsebase::Parser<> &parser)
void Description::parseVariablesSection(tokenize::Tokenizer<> &tokenizer)
{
const auto numberOfVariables = parser.parse<size_t>();
const auto numberOfVariables = tokenizer.get<size_t>();
m_variables.reserve(numberOfVariables);
for (size_t i = 0; i < numberOfVariables; i++)
m_variables.emplace_back(Variable::fromSAS(parser));
m_variables.emplace_back(Variable::fromSAS(tokenizer));
}
////////////////////////////////////////////////////////////////////////////////////////////////////
void Description::parseMutexSection(parsebase::Parser<> &parser)
void Description::parseMutexSection(tokenize::Tokenizer<> &tokenizer)
{
const auto numberOfMutexGroups = parser.parse<size_t>();
const auto numberOfMutexGroups = tokenizer.get<size_t>();
m_mutexGroups.reserve(numberOfMutexGroups);
for (size_t i = 0; i < numberOfMutexGroups; i++)
m_mutexGroups.emplace_back(MutexGroup::fromSAS(parser, m_variables));
m_mutexGroups.emplace_back(MutexGroup::fromSAS(tokenizer, m_variables));
}
////////////////////////////////////////////////////////////////////////////////////////////////////
void Description::parseInitialStateSection(parsebase::Parser<> &parser)
void Description::parseInitialStateSection(tokenize::Tokenizer<> &tokenizer)
{
m_initialState = std::make_unique<InitialState>(InitialState::fromSAS(parser, m_variables));
m_initialState = std::make_unique<InitialState>(InitialState::fromSAS(tokenizer, m_variables));
}
////////////////////////////////////////////////////////////////////////////////////////////////////
void Description::parseGoalSection(parsebase::Parser<> &parser)
void Description::parseGoalSection(tokenize::Tokenizer<> &tokenizer)
{
m_goal = std::make_unique<Goal>(Goal::fromSAS(parser, m_variables));
m_goal = std::make_unique<Goal>(Goal::fromSAS(tokenizer, m_variables));
}
////////////////////////////////////////////////////////////////////////////////////////////////////
void Description::parseOperatorSection(parsebase::Parser<> &parser)
void Description::parseOperatorSection(tokenize::Tokenizer<> &tokenizer)
{
const auto numberOfOperators = parser.parse<size_t>();
const auto numberOfOperators = tokenizer.get<size_t>();
m_operators.reserve(numberOfOperators);
for (size_t i = 0; i < numberOfOperators; i++)
m_operators.emplace_back(Operator::fromSAS(parser, m_variables));
m_operators.emplace_back(Operator::fromSAS(tokenizer, m_variables));
}
////////////////////////////////////////////////////////////////////////////////////////////////////
void Description::parseAxiomSection(parsebase::Parser<> &parser)
void Description::parseAxiomSection(tokenize::Tokenizer<> &tokenizer)
{
const auto numberOfAxiomRules = parser.parse<size_t>();
const auto numberOfAxiomRules = tokenizer.get<size_t>();
m_axiomRules.reserve(numberOfAxiomRules);
for (size_t i = 0; i < numberOfAxiomRules; i++)
m_axiomRules.emplace_back(AxiomRule::fromSAS(parser, m_variables));
m_axiomRules.emplace_back(AxiomRule::fromSAS(tokenizer, m_variables));
}
////////////////////////////////////////////////////////////////////////////////////////////////////

View File

@@ -23,17 +23,17 @@ Effect::Effect(Conditions conditions, Condition postcondition)
////////////////////////////////////////////////////////////////////////////////////////////////////
Effect Effect::fromSAS(parsebase::Parser<> &parser, const Variables &variables, Conditions &preconditions)
Effect Effect::fromSAS(tokenize::Tokenizer<> &tokenizer, const Variables &variables, Conditions &preconditions)
{
Effect::Conditions conditions;
const auto numberOfEffectConditions = parser.parse<size_t>();
const auto numberOfEffectConditions = tokenizer.get<size_t>();
conditions.reserve(numberOfEffectConditions);
for (size_t k = 0; k < numberOfEffectConditions; k++)
conditions.emplace_back(Condition::fromSAS(parser, variables));
conditions.emplace_back(Condition::fromSAS(tokenizer, variables));
const auto variableTransition = VariableTransition::fromSAS(parser, variables);
const auto variableTransition = VariableTransition::fromSAS(tokenizer, variables);
if (&variableTransition.valueBefore() != &Value::Any)
preconditions.emplace_back(Condition(variableTransition.variable(), variableTransition.valueBefore()));

View File

@@ -13,19 +13,19 @@ namespace sas
//
////////////////////////////////////////////////////////////////////////////////////////////////////
Goal Goal::fromSAS(parsebase::Parser<> &parser, const Variables &variables)
Goal Goal::fromSAS(tokenize::Tokenizer<> &tokenizer, const Variables &variables)
{
Goal goal;
parser.expect<std::string>("begin_goal");
tokenizer.expect<std::string>("begin_goal");
const auto numberOfGoalFacts = parser.parse<size_t>();
const auto numberOfGoalFacts = tokenizer.get<size_t>();
goal.m_facts.reserve(numberOfGoalFacts);
for (size_t i = 0; i < numberOfGoalFacts; i++)
goal.m_facts.emplace_back(Fact::fromSAS(parser, variables));
goal.m_facts.emplace_back(Fact::fromSAS(tokenizer, variables));
parser.expect<std::string>("end_goal");
tokenizer.expect<std::string>("end_goal");
return goal;
}

View File

@@ -11,18 +11,18 @@ namespace sas
//
////////////////////////////////////////////////////////////////////////////////////////////////////
InitialState InitialState::fromSAS(parsebase::Parser<> &parser, const Variables &variables)
InitialState InitialState::fromSAS(tokenize::Tokenizer<> &tokenizer, const Variables &variables)
{
InitialState initialState;
parser.expect<std::string>("begin_state");
tokenizer.expect<std::string>("begin_state");
initialState.m_facts.reserve(variables.size());
for (size_t i = 0; i < variables.size(); i++)
initialState.m_facts.emplace_back(Fact::fromSAS(parser, variables[i]));
initialState.m_facts.emplace_back(Fact::fromSAS(tokenizer, variables[i]));
parser.expect<std::string>("end_state");
tokenizer.expect<std::string>("end_state");
return initialState;
}

View File

@@ -2,7 +2,7 @@
#include <iostream>
#include <parsebase/ParserException.h>
#include <tokenize/TokenizerException.h>
namespace plasp
{
@@ -15,24 +15,24 @@ namespace sas
//
////////////////////////////////////////////////////////////////////////////////////////////////////
MutexGroup MutexGroup::fromSAS(parsebase::Parser<> &parser, const Variables &variables)
MutexGroup MutexGroup::fromSAS(tokenize::Tokenizer<> &tokenizer, const Variables &variables)
{
MutexGroup mutexGroup;
parser.expect<std::string>("begin_mutex_group");
tokenizer.expect<std::string>("begin_mutex_group");
const auto numberOfFacts = parser.parse<size_t>();
const auto numberOfFacts = tokenizer.get<size_t>();
mutexGroup.m_facts.reserve(numberOfFacts);
for (size_t j = 0; j < numberOfFacts; j++)
{
mutexGroup.m_facts.emplace_back(Fact::fromSAS(parser, variables));
mutexGroup.m_facts.emplace_back(Fact::fromSAS(tokenizer, variables));
if (mutexGroup.m_facts[j].value() == Value::None)
throw parsebase::ParserException(parser.location(), "mutex groups must not contain <none of those> values");
throw tokenize::TokenizerException(tokenizer.location(), "mutex groups must not contain <none of those> values");
}
parser.expect<std::string>("end_mutex_group");
tokenizer.expect<std::string>("end_mutex_group");
return mutexGroup;
}

View File

@@ -17,29 +17,29 @@ namespace sas
//
////////////////////////////////////////////////////////////////////////////////////////////////////
Operator Operator::fromSAS(parsebase::Parser<> &parser, const Variables &variables)
Operator Operator::fromSAS(tokenize::Tokenizer<> &tokenizer, const Variables &variables)
{
Operator operator_;
parser.expect<std::string>("begin_operator");
tokenizer.expect<std::string>("begin_operator");
operator_.m_predicate = Predicate::fromSAS(parser);
operator_.m_predicate = Predicate::fromSAS(tokenizer);
const auto numberOfPrevailConditions = parser.parse<size_t>();
const auto numberOfPrevailConditions = tokenizer.get<size_t>();
operator_.m_preconditions.reserve(numberOfPrevailConditions);
for (size_t j = 0; j < numberOfPrevailConditions; j++)
operator_.m_preconditions.emplace_back(Condition::fromSAS(parser, variables));
operator_.m_preconditions.emplace_back(Condition::fromSAS(tokenizer, variables));
const auto numberOfEffects = parser.parse<size_t>();
const auto numberOfEffects = tokenizer.get<size_t>();
operator_.m_effects.reserve(numberOfEffects);
for (size_t j = 0; j < numberOfEffects; j++)
operator_.m_effects.emplace_back(Effect::fromSAS(parser, variables, operator_.m_preconditions));
operator_.m_effects.emplace_back(Effect::fromSAS(tokenizer, variables, operator_.m_preconditions));
operator_.m_costs = parser.parse<size_t>();
operator_.m_costs = tokenizer.get<size_t>();
parser.expect<std::string>("end_operator");
tokenizer.expect<std::string>("end_operator");
return operator_;
}

View File

@@ -6,7 +6,7 @@
#include <plasp/output/Formatting.h>
#include <parsebase/ParserException.h>
#include <tokenize/TokenizerException.h>
namespace plasp
{
@@ -19,32 +19,32 @@ namespace sas
//
////////////////////////////////////////////////////////////////////////////////////////////////////
Predicate Predicate::fromSAS(parsebase::Parser<> &parser)
Predicate Predicate::fromSAS(tokenize::Tokenizer<> &tokenizer)
{
Predicate predicate;
try
{
parser.skipLine();
tokenizer.skipLine();
predicate.m_name = parser.parse<std::string>();
predicate.m_name = tokenizer.get<std::string>();
while (true)
{
// Skip whitespace but not newlines
parser.skipBlankSpace();
tokenizer.skipBlankSpace();
// TODO: check \r handling
if (parser.currentCharacter() == '\n')
if (tokenizer.currentCharacter() == '\n')
break;
const auto value = parser.parse<std::string>();
const auto value = tokenizer.get<std::string>();
predicate.m_arguments.emplace_back(std::move(value));
}
}
catch (const std::exception &e)
{
throw parsebase::ParserException(parser.location(), "could not parse operator predicate");
throw tokenize::TokenizerException(tokenizer.location(), "could not parse operator predicate");
}
return predicate;

View File

@@ -5,7 +5,7 @@
#include <plasp/output/Formatting.h>
#include <plasp/sas/Variable.h>
#include <parsebase/ParserException.h>
#include <tokenize/TokenizerException.h>
namespace plasp
{
@@ -55,14 +55,14 @@ Value Value::negated() const
////////////////////////////////////////////////////////////////////////////////////////////////////
Value Value::fromSAS(parsebase::Parser<> &parser)
Value Value::fromSAS(tokenize::Tokenizer<> &tokenizer)
{
const auto sasSign = parser.parse<std::string>();
const auto sasSign = tokenizer.get<std::string>();
if (sasSign == "<none")
{
parser.expect<std::string>("of");
parser.expect<std::string>("those>");
tokenizer.expect<std::string>("of");
tokenizer.expect<std::string>("those>");
// TODO: do not return a copy of Value::None
return Value::None;
@@ -75,12 +75,12 @@ Value Value::fromSAS(parsebase::Parser<> &parser)
else if (sasSign == "NegatedAtom")
value.m_sign = Value::Sign::Negative;
else
throw parsebase::ParserException(parser.location(), "invalid value sign “" + sasSign + "”");
throw tokenize::TokenizerException(tokenizer.location(), "invalid value sign “" + sasSign + "”");
try
{
parser.skipWhiteSpace();
value.m_name = parser.parseLine();
tokenizer.skipWhiteSpace();
value.m_name = tokenizer.getLine();
// Remove trailing ()
if (value.m_name.find("()") != std::string::npos)
@@ -91,7 +91,7 @@ Value Value::fromSAS(parsebase::Parser<> &parser)
}
catch (const std::exception &e)
{
throw parsebase::ParserException(parser.location(), std::string("could not parse variable value (") + e.what() + ")");
throw tokenize::TokenizerException(tokenizer.location(), std::string("could not parse variable value (") + e.what() + ")");
}
return value;
@@ -99,15 +99,15 @@ Value Value::fromSAS(parsebase::Parser<> &parser)
////////////////////////////////////////////////////////////////////////////////////////////////////
const Value &Value::referenceFromSAS(parsebase::Parser<> &parser, const Variable &variable)
const Value &Value::referenceFromSAS(tokenize::Tokenizer<> &tokenizer, const Variable &variable)
{
const auto valueID = parser.parse<int>();
const auto valueID = tokenizer.get<int>();
if (valueID == -1)
return Value::Any;
if (valueID < 0 || static_cast<size_t>(valueID) >= variable.values().size())
throw parsebase::ParserException(parser.location(), "value index out of range (variable " + variable.name() + ", index " + std::to_string(valueID) + ")");
throw tokenize::TokenizerException(tokenizer.location(), "value index out of range (variable " + variable.name() + ", index " + std::to_string(valueID) + ")");
return variable.values()[valueID];
}

View File

@@ -4,7 +4,7 @@
#include <plasp/output/Formatting.h>
#include <parsebase/ParserException.h>
#include <tokenize/TokenizerException.h>
namespace plasp
{
@@ -24,29 +24,29 @@ Variable::Variable()
////////////////////////////////////////////////////////////////////////////////////////////////////
Variable Variable::fromSAS(parsebase::Parser<> &parser)
Variable Variable::fromSAS(tokenize::Tokenizer<> &tokenizer)
{
Variable variable;
parser.expect<std::string>("begin_variable");
parser.expect<std::string>("var");
tokenizer.expect<std::string>("begin_variable");
tokenizer.expect<std::string>("var");
variable.m_name = parser.parse<std::string>();
variable.m_axiomLayer = parser.parse<int>();
variable.m_name = tokenizer.get<std::string>();
variable.m_axiomLayer = tokenizer.get<int>();
const auto numberOfValues = parser.parse<size_t>();
const auto numberOfValues = tokenizer.get<size_t>();
variable.m_values.reserve(numberOfValues);
for (size_t j = 0; j < numberOfValues; j++)
{
variable.m_values.emplace_back(Value::fromSAS(parser));
variable.m_values.emplace_back(Value::fromSAS(tokenizer));
// <none of those> values are only allowed at the end
if (j < numberOfValues - 1 && variable.m_values[j] == Value::None)
throw parsebase::ParserException(parser.location(), "<none of those> value must be the last value of a variable");
throw tokenize::TokenizerException(tokenizer.location(), "<none of those> value must be the last value of a variable");
}
parser.expect<std::string>("end_variable");
tokenizer.expect<std::string>("end_variable");
return variable;
}
@@ -61,12 +61,12 @@ void Variable::printNameAsASPPredicate(output::ColorStream &stream) const
////////////////////////////////////////////////////////////////////////////////////////////////////
const Variable &Variable::referenceFromSAS(parsebase::Parser<> &parser, const Variables &variables)
const Variable &Variable::referenceFromSAS(tokenize::Tokenizer<> &tokenizer, const Variables &variables)
{
const auto variableID = parser.parse<size_t>();
const auto variableID = tokenizer.get<size_t>();
if (variableID >= variables.size())
throw parsebase::ParserException(parser.location(), "variable index out of range (index " + std::to_string(variableID) + ")");
throw tokenize::TokenizerException(tokenizer.location(), "variable index out of range (index " + std::to_string(variableID) + ")");
return variables[variableID];
}

View File

@@ -24,13 +24,13 @@ VariableTransition::VariableTransition()
////////////////////////////////////////////////////////////////////////////////////////////////////
VariableTransition VariableTransition::fromSAS(parsebase::Parser<> &parser, const Variables &variables)
VariableTransition VariableTransition::fromSAS(tokenize::Tokenizer<> &tokenizer, const Variables &variables)
{
VariableTransition variableTransition;
variableTransition.m_variable = &Variable::referenceFromSAS(parser, variables);
variableTransition.m_valueBefore = &Value::referenceFromSAS(parser, *variableTransition.m_variable);
variableTransition.m_valueAfter = &Value::referenceFromSAS(parser, *variableTransition.m_variable);
variableTransition.m_variable = &Variable::referenceFromSAS(tokenizer, variables);
variableTransition.m_valueBefore = &Value::referenceFromSAS(tokenizer, *variableTransition.m_variable);
variableTransition.m_valueAfter = &Value::referenceFromSAS(tokenizer, *variableTransition.m_variable);
return variableTransition;
}