plasp/lib/pddlparse/src/pddlparse/detail/parsing/InitialState.cpp

#include <pddlparse/detail/parsing/InitialState.h>

#include <pddlparse/AST.h>

// TODO: remove
#include <pddlparse/detail/parsing/Utils.h>

namespace pddl
{
namespace detail
{

////////////////////////////////////////////////////////////////////////////////////////////////////
//
// InitialState
//
////////////////////////////////////////////////////////////////////////////////////////////////////
ast::InitialState parseInitialState(Context &context, ASTContext &)
{
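	// NOTE: fact-by-fact parsing is not reimplemented yet (see the TODO below); for now, the
	// (:init …) section is skipped as a whole and an empty initial state is returned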
	auto &tokenizer = context.tokenizer;

	ast::InitialState initialState;

	// TODO: reimplement
	/*const auto parseInitialStateElement =
		[&]() -> ExpressionPointer
		{
			ExpressionPointer expression;

			// TODO: do not allow negative initial state literals
			if ((expression = parseLiteral(context, expressionContext))
				|| (expression = expressions::At::parse(context, expressionContext, parseLiteral)))
			{
				return expression;
			}

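			// Remember the tokenizer position so that the look-ahead below can be undone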
			const auto position = tokenizer.position();

			tokenizer.expect<std::string>("(");

			const auto expressionIdentifierPosition = tokenizer.position();

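			// “=” expressions (for instance, initial function values) are recognized but parsed
			// as unsupported expressions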
			if (tokenizer.testIdentifierAndSkip("="))
			{
				tokenizer.seek(expressionIdentifierPosition);
				const auto expressionIdentifier = tokenizer.getIdentifier();

				tokenizer.seek(position);
				return expressions::Unsupported::parse(context);
			}

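			// Otherwise, reread the identifier only to report a meaningful error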
			tokenizer.seek(expressionIdentifierPosition);
			const auto expressionIdentifier = tokenizer.getIdentifier();

			tokenizer.seek(position);
			throw tokenize::TokenizerException(tokenizer.location(), "expression type “" + expressionIdentifier + "” unknown or not allowed in this context");
		};

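	// Parse fact after fact until the closing parenthesis of the (:init …) section is reached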
	tokenizer.skipWhiteSpace();

	while (tokenizer.currentCharacter() != ')')
	{
		ast::Expression expression;

		if ((expression = parseInitialStateElement()))
			initialState->m_facts.emplace_back(std::move(expression));

		tokenizer.skipWhiteSpace();
	}*/

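	// Skip the remainder of the section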
	skipSection(tokenizer);

	return initialState;
}

////////////////////////////////////////////////////////////////////////////////////////////////////
}
}