patrick / plasp

Made plasp compatible with recent tokenizer changes.

Patrick Lühne 2017-06-19 00:28:17 +02:00
parent 9fe489de53
commit 586e93190c
Signed by: patrick
GPG Key ID: 05F3611E97A70ABF
22 changed files with 81 additions and 81 deletions
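
In short, the diff applies one mechanical migration across the PDDL and SAS parsers. A schematic before/after sketch of the pattern follows (not code taken verbatim from the repository; the tokenize:: names are those visible in the hunks below):

// 1. The stream position type was renamed.
//    old: tokenize::Stream::Position m_domainPosition;
//    new: tokenize::StreamPosition m_domainPosition;
//
// 2. The magic sentinel -1 for "section not found" is replaced by a named constant.
//    old: if (m_domainPosition == -1) ...
//    new: if (m_domainPosition == tokenize::InvalidStreamPosition) ...
//
// 3. TokenizerException and Logger::log now take the tokenizer itself instead of tokenizer.location().
//    old: throw tokenize::TokenizerException(tokenizer.location(), "unknown PDDL section ...");
//    new: throw tokenize::TokenizerException(tokenizer, "unknown PDDL section ...");
//
// 4. Location fields become accessor calls (see the Logger hunk).
//    old: location.rowStart
//    new: location.rowStart()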

View File

@@ -46,9 +46,9 @@ class Description
 Context &m_context;
-tokenize::Stream::Position m_domainPosition;
+tokenize::StreamPosition m_domainPosition;
 std::unique_ptr<Domain> m_domain;
-tokenize::Stream::Position m_problemPosition;
+tokenize::StreamPosition m_problemPosition;
 std::unique_ptr<Problem> m_problem;
 };

View File

@@ -75,19 +75,19 @@ class Domain
 std::string m_name;
-tokenize::Stream::Position m_requirementsPosition;
+tokenize::StreamPosition m_requirementsPosition;
 Requirements m_requirements;
-tokenize::Stream::Position m_typesPosition;
+tokenize::StreamPosition m_typesPosition;
 expressions::PrimitiveTypes m_types;
-tokenize::Stream::Position m_constantsPosition;
+tokenize::StreamPosition m_constantsPosition;
 expressions::Constants m_constants;
-tokenize::Stream::Position m_predicatesPosition;
+tokenize::StreamPosition m_predicatesPosition;
 expressions::PredicateDeclarations m_predicates;
-std::vector<tokenize::Stream::Position> m_actionPositions;
+std::vector<tokenize::StreamPosition> m_actionPositions;
 std::vector<std::unique_ptr<Action>> m_actions;
 expressions::DerivedPredicates m_derivedPredicates;

View File

@@ -64,18 +64,18 @@ class Problem
 std::string m_name;
-tokenize::Stream::Position m_domainPosition;
+tokenize::StreamPosition m_domainPosition;
-tokenize::Stream::Position m_requirementsPosition;
+tokenize::StreamPosition m_requirementsPosition;
 Requirements m_requirements;
-tokenize::Stream::Position m_objectsPosition;
+tokenize::StreamPosition m_objectsPosition;
 expressions::Constants m_objects;
-tokenize::Stream::Position m_initialStatePosition;
+tokenize::StreamPosition m_initialStatePosition;
 std::unique_ptr<InitialState> m_initialState;
-tokenize::Stream::Position m_goalPosition;
+tokenize::StreamPosition m_goalPosition;
 ExpressionPointer m_goal;
 };

View File

@@ -79,7 +79,7 @@ boost::intrusive_ptr<Derived> NAry<Derived>::parse(Context &context,
 }
 if (expression->m_arguments.empty())
-context.logger.log(output::Priority::Warning, tokenizer.location(), "“" + Derived::Identifier + "” expressions should not be empty");
+context.logger.log(output::Priority::Warning, tokenizer, "“" + Derived::Identifier + "” expressions should not be empty");
 tokenizer.expect<std::string>(")");

View File

@@ -136,7 +136,7 @@ void Logger::log(Priority priority, const tokenize::Location &location, const ch
 m_errorStream
 << LocationFormat
-<< location.sectionStart << ":" << location.rowStart << ":" << location.columnStart << ":"
+<< location.sectionStart() << ":" << location.rowStart() << ":" << location.columnStart() << ":"
 << ResetFormat() << " "
 << priorityFormat(priority) << priorityName(priority) << ":"
 << ResetFormat() << " "

View File

@@ -21,9 +21,9 @@ namespace pddl
 Description::Description(Context &context)
 : m_context(context),
-m_domainPosition{-1},
+m_domainPosition{tokenize::InvalidStreamPosition},
 m_domain{std::make_unique<Domain>(Domain(m_context))},
-m_problemPosition{-1}
+m_problemPosition{tokenize::InvalidStreamPosition}
 {
 }
@@ -133,13 +133,13 @@ void Description::parse()
 // First, determine the locations of domain and problem
 findSections();
-if (m_domainPosition == -1)
+if (m_domainPosition == tokenize::InvalidStreamPosition)
 throw ConsistencyException("no PDDL domain specified");
 tokenizer.seek(m_domainPosition);
 m_domain->parse();
-if (m_problemPosition != -1)
+if (m_problemPosition != tokenize::InvalidStreamPosition)
 {
 tokenizer.seek(m_problemPosition);
 m_problem->parse();
@@ -166,8 +166,8 @@ void Description::findSections()
 if (tokenizer.testAndSkip<std::string>("domain"))
 {
-if (m_domainPosition != -1)
-throw tokenize::TokenizerException(tokenizer.location(), "PDDL description may not contain two domains");
+if (m_domainPosition != tokenize::InvalidStreamPosition)
+throw tokenize::TokenizerException(tokenizer, "PDDL description may not contain two domains");
 m_domainPosition = position;
@@ -176,8 +176,8 @@ void Description::findSections()
 }
 else if (tokenizer.testAndSkip<std::string>("problem"))
 {
-if (m_problemPosition != -1)
-throw tokenize::TokenizerException(tokenizer.location(), "PDDL description may currently not contain two problems");
+if (m_problemPosition != tokenize::InvalidStreamPosition)
+throw tokenize::TokenizerException(tokenizer, "PDDL description may currently not contain two problems");
 m_problem = std::make_unique<Problem>(Problem(m_context, *m_domain));
@@ -189,7 +189,7 @@ void Description::findSections()
 else
 {
 const auto sectionIdentifier = tokenizer.get<std::string>();
-throw tokenize::TokenizerException(tokenizer.location(), "unknown PDDL section “" + sectionIdentifier + "”");
+throw tokenize::TokenizerException(tokenizer, "unknown PDDL section “" + sectionIdentifier + "”");
 }
 tokenizer.skipWhiteSpace();

View File

@@ -24,10 +24,10 @@ namespace pddl
 Domain::Domain(Context &context)
 : m_context(context),
-m_requirementsPosition{-1},
-m_typesPosition{-1},
-m_constantsPosition{-1},
-m_predicatesPosition{-1}
+m_requirementsPosition{tokenize::InvalidStreamPosition},
+m_typesPosition{tokenize::InvalidStreamPosition},
+m_constantsPosition{tokenize::InvalidStreamPosition},
+m_predicatesPosition{tokenize::InvalidStreamPosition}
 {
 }
@@ -49,10 +49,10 @@ void Domain::findSections()
 const auto setSectionPosition =
 [&](const std::string &sectionName, auto &sectionPosition, const auto value, bool unique = false)
 {
-if (unique && sectionPosition != -1)
+if (unique && sectionPosition != tokenize::InvalidStreamPosition)
 {
 tokenizer.seek(value);
-throw tokenize::TokenizerException(tokenizer.location(), "only one “:" + sectionName + "” section allowed");
+throw tokenize::TokenizerException(tokenizer, "only one “:" + sectionName + "” section allowed");
 }
 sectionPosition = value;
@@ -93,7 +93,7 @@ void Domain::findSections()
 const auto sectionIdentifier = tokenizer.getIdentifier();
-m_context.logger.log(output::Priority::Warning, tokenizer.location(), "section type “" + sectionIdentifier + "” currently unsupported");
+m_context.logger.log(output::Priority::Warning, tokenizer, "section type “" + sectionIdentifier + "” currently unsupported");
 tokenizer.seek(sectionIdentifierPosition);
 }
@@ -102,7 +102,7 @@ void Domain::findSections()
 const auto sectionIdentifier = tokenizer.getIdentifier();
 tokenizer.seek(position);
-throw tokenize::TokenizerException(tokenizer.location(), "unknown domain section “" + sectionIdentifier + "”");
+throw tokenize::TokenizerException(tokenizer, "unknown domain section “" + sectionIdentifier + "”");
 }
 // Skip section for now and parse it later
@@ -120,32 +120,32 @@ void Domain::parse()
 {
 auto &tokenizer = m_context.tokenizer;
-if (m_requirementsPosition != -1)
+if (m_requirementsPosition != tokenize::InvalidStreamPosition)
 {
 tokenizer.seek(m_requirementsPosition);
 parseRequirementSection();
 }
-if (m_typesPosition != -1)
+if (m_typesPosition != tokenize::InvalidStreamPosition)
 {
 tokenizer.seek(m_typesPosition);
 parseTypeSection();
 }
-if (m_constantsPosition != -1)
+if (m_constantsPosition != tokenize::InvalidStreamPosition)
 {
 tokenizer.seek(m_constantsPosition);
 parseConstantSection();
 }
-if (m_predicatesPosition != -1)
+if (m_predicatesPosition != tokenize::InvalidStreamPosition)
 {
 tokenizer.seek(m_predicatesPosition);
 parsePredicateSection();
 }
 for (size_t i = 0; i < m_actionPositions.size(); i++)
-if (m_actionPositions[i] != -1)
+if (m_actionPositions[i] != tokenize::InvalidStreamPosition)
 {
 tokenizer.seek(m_actionPositions[i]);
 parseActionSection();
@@ -292,7 +292,7 @@ void Domain::checkRequirement(Requirement::Type requirementType)
 if (hasRequirement(requirementType))
 return;
-m_context.logger.log(output::Priority::Warning, m_context.tokenizer.location(), "requirement “" + Requirement(requirementType).toPDDL() + "” used but never declared");
+m_context.logger.log(output::Priority::Warning, m_context.tokenizer, "requirement “" + Requirement(requirementType).toPDDL() + "” used but never declared");
 m_requirements.push_back(requirementType);
 }
@@ -355,7 +355,7 @@ void Domain::parseTypeSection()
 while (tokenizer.currentCharacter() != ')')
 {
 if (tokenizer.currentCharacter() == '(')
-throw tokenize::TokenizerException(tokenizer.location(), "only primitive types are allowed in type section");
+throw tokenize::TokenizerException(tokenizer, "only primitive types are allowed in type section");
 expressions::PrimitiveType::parseTypedDeclaration(m_context, *this);

View File

@@ -182,7 +182,7 @@ ExpressionPointer parseExpression(Context &context, ExpressionContext &expressio
 const auto expressionIdentifier = tokenizer.getIdentifier();
 tokenizer.seek(position);
-throw tokenize::TokenizerException(tokenizer.location(), "expression type “" + expressionIdentifier + "” unknown or not allowed in this context");
+throw tokenize::TokenizerException(tokenizer, "expression type “" + expressionIdentifier + "” unknown or not allowed in this context");
 }
 ////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -257,7 +257,7 @@ ExpressionPointer parseEffectBodyExpression(Context &context, ExpressionContext
 const auto expressionIdentifier = tokenizer.getIdentifier();
 tokenizer.seek(position);
-throw tokenize::TokenizerException(tokenizer.location(), "expression type “" + expressionIdentifier + "” unknown or not allowed in this context");
+throw tokenize::TokenizerException(tokenizer, "expression type “" + expressionIdentifier + "” unknown or not allowed in this context");
 }
 ////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -281,7 +281,7 @@ ExpressionPointer parsePredicate(Context &context, ExpressionContext &expression
 if ((expression = expressions::Predicate::parse(context, expressionContext)))
 return expression;
-throw tokenize::TokenizerException(context.tokenizer.location(), "expected predicate");
+throw tokenize::TokenizerException(context.tokenizer, "expected predicate");
 }
 ////////////////////////////////////////////////////////////////////////////////////////////////////

View File

@@ -59,7 +59,7 @@ std::unique_ptr<InitialState> InitialState::parseDeclaration(Context &context,
 const auto expressionIdentifier = tokenizer.getIdentifier();
 tokenizer.seek(position);
-throw tokenize::TokenizerException(tokenizer.location(), "expression type “" + expressionIdentifier + "” unknown or not allowed in this context");
+throw tokenize::TokenizerException(tokenizer, "expression type “" + expressionIdentifier + "” unknown or not allowed in this context");
 };
 tokenizer.skipWhiteSpace();

View File

@@ -24,11 +24,11 @@ namespace pddl
 Problem::Problem(Context &context, Domain &domain)
 : m_context(context),
 m_domain(domain),
-m_domainPosition{-1},
-m_requirementsPosition{-1},
-m_objectsPosition{-1},
-m_initialStatePosition{-1},
-m_goalPosition{-1}
+m_domainPosition{tokenize::InvalidStreamPosition},
+m_requirementsPosition{tokenize::InvalidStreamPosition},
+m_objectsPosition{tokenize::InvalidStreamPosition},
+m_initialStatePosition{tokenize::InvalidStreamPosition},
+m_goalPosition{tokenize::InvalidStreamPosition}
 {
 }
@@ -50,10 +50,10 @@ void Problem::findSections()
 const auto setSectionPosition =
 [&](const std::string &sectionName, auto &sectionPosition, const auto value, bool unique = false)
 {
-if (unique && sectionPosition != -1)
+if (unique && sectionPosition != tokenize::InvalidStreamPosition)
 {
 tokenizer.seek(value);
-throw tokenize::TokenizerException(tokenizer.location(), "only one “:" + sectionName + "” section allowed");
+throw tokenize::TokenizerException(tokenizer, "only one “:" + sectionName + "” section allowed");
 }
 sectionPosition = value;
@@ -89,7 +89,7 @@ void Problem::findSections()
 const auto sectionIdentifier = tokenizer.getIdentifier();
-m_context.logger.log(output::Priority::Warning, tokenizer.location(), "section type “" + sectionIdentifier + "” currently unsupported");
+m_context.logger.log(output::Priority::Warning, tokenizer, "section type “" + sectionIdentifier + "” currently unsupported");
 tokenizer.seek(sectionIdentifierPosition);
 }
@@ -98,7 +98,7 @@ void Problem::findSections()
 const auto sectionIdentifier = tokenizer.getIdentifier();
 tokenizer.seek(position);
-throw tokenize::TokenizerException(tokenizer.location(), "unknown problem section “" + sectionIdentifier + "”");
+throw tokenize::TokenizerException(tokenizer, "unknown problem section “" + sectionIdentifier + "”");
 }
 // Skip section for now and parse it later
@@ -116,31 +116,31 @@ void Problem::parse()
 {
 auto &tokenizer = m_context.tokenizer;
-if (m_domainPosition == -1)
+if (m_domainPosition == tokenize::InvalidStreamPosition)
 throw ConsistencyException("problem description does not specify the corresponding domain");
 tokenizer.seek(m_domainPosition);
 parseDomainSection();
-if (m_requirementsPosition != -1)
+if (m_requirementsPosition != tokenize::InvalidStreamPosition)
 {
 tokenizer.seek(m_requirementsPosition);
 parseRequirementSection();
 }
-if (m_objectsPosition != -1)
+if (m_objectsPosition != tokenize::InvalidStreamPosition)
 {
 tokenizer.seek(m_objectsPosition);
 parseObjectSection();
 }
-if (m_initialStatePosition == -1)
+if (m_initialStatePosition == tokenize::InvalidStreamPosition)
 throw ConsistencyException("problem description does not specify an initial state");
 tokenizer.seek(m_initialStatePosition);
 parseInitialStateSection();
-if (m_goalPosition == -1)
+if (m_goalPosition == tokenize::InvalidStreamPosition)
 throw ConsistencyException("problem description does not specify a goal");
 tokenizer.seek(m_goalPosition);
@@ -204,7 +204,7 @@ void Problem::parseDomainSection()
 const auto domainName = tokenizer.getIdentifier();
 if (m_domain.name() != domainName)
-throw tokenize::TokenizerException(tokenizer.location(), "domains do not match (“" + m_domain.name() + "” and “" + domainName + "”)");
+throw tokenize::TokenizerException(tokenizer, "domains do not match (“" + m_domain.name() + "” and “" + domainName + "”)");
 tokenizer.expect<std::string>(")");
 }
@@ -261,7 +261,7 @@ void Problem::checkRequirement(Requirement::Type requirementType)
 if (hasRequirement(requirementType))
 return;
-m_context.logger.log(output::Priority::Warning, m_context.tokenizer.location(), "requirement “" + Requirement(requirementType).toPDDL() + "” used but never declared");
+m_context.logger.log(output::Priority::Warning, m_context.tokenizer, "requirement “" + Requirement(requirementType).toPDDL() + "” used but never declared");
 m_requirements.push_back(requirementType);
 }
}

View File

@@ -89,12 +89,12 @@ Requirement Requirement::parse(Context &context)
 const auto match = requirementTypesToPDDL.right.find(requirementName);
 if (match == requirementTypesToPDDL.right.end())
-throw tokenize::TokenizerException(tokenizer.location(), "unknown PDDL requirement “" + requirementName + "”");
+throw tokenize::TokenizerException(tokenizer, "unknown PDDL requirement “" + requirementName + "”");
 const auto requirementType = match->second;
 if (requirementType == Requirement::Type::GoalUtilities)
-context.logger.log(output::Priority::Warning, tokenizer.location(), "requirement “goal-utilities” is not part of the PDDL 3.1 specification");
+context.logger.log(output::Priority::Warning, tokenizer, "requirement “goal-utilities” is not part of the PDDL 3.1 specification");
 return Requirement(match->second);
 }

View File

@@ -53,7 +53,7 @@ expressions::VariablePointer VariableStack::parseAndFind(plasp::pddl::Context &c
 return match->get();
 }
-throw tokenize::TokenizerException(tokenizer.location(), "variable “" + variableName + "” used but never declared");
+throw tokenize::TokenizerException(tokenizer, "variable “" + variableName + "” used but never declared");
 }
 ////////////////////////////////////////////////////////////////////////////////////////////////////

View File

@@ -115,7 +115,7 @@ void Constant::parseTypedDeclarations(Context &context, Domain &domain)
 domain.checkRequirement(Requirement::Type::Typing);
 // If no types are given, check that typing is not a requirement
 else if (domain.hasRequirement(Requirement::Type::Typing))
-throw tokenize::TokenizerException(tokenizer.location(), "constant has undeclared type");
+throw tokenize::TokenizerException(tokenizer, "constant has undeclared type");
 }
 ////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -142,7 +142,7 @@ void Constant::parseTypedDeclarations(Context &context, Problem &problem)
 problem.checkRequirement(Requirement::Type::Typing);
 // If no types are given, check that typing is not a requirement
 else if (problem.hasRequirement(Requirement::Type::Typing))
-throw tokenize::TokenizerException(tokenizer.location(), "constant has undeclared type");
+throw tokenize::TokenizerException(tokenizer, "constant has undeclared type");
 }
 ////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -160,7 +160,7 @@ ConstantPointer Constant::parseAndFind(Context &context, const Domain &domain)
 if (constant != nullptr)
 return constant;
-throw tokenize::TokenizerException(tokenizer.location(), "constant “" + constantName + "” used but never declared");
+throw tokenize::TokenizerException(tokenizer, "constant “" + constantName + "” used but never declared");
 }
 ////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -183,7 +183,7 @@ ConstantPointer Constant::parseAndFind(Context &context, const Problem &problem)
 if (constant)
 return constant;
-throw tokenize::TokenizerException(tokenizer.location(), "constant “" + constantName + "” used but never declared");
+throw tokenize::TokenizerException(tokenizer, "constant “" + constantName + "” used but never declared");
 }
 ////////////////////////////////////////////////////////////////////////////////////////////////////

View File

@@ -126,7 +126,7 @@ PredicatePointer Predicate::parse(Context &context, const Problem &problem)
 while (tokenizer.currentCharacter() != ')')
 {
 if (tokenizer.currentCharacter() == '?')
-throw tokenize::TokenizerException(tokenizer.location(), "variables not allowed in this context");
+throw tokenize::TokenizerException(tokenizer, "variables not allowed in this context");
 // Parse objects and constants
 const auto constant = Constant::parseAndFind(context, problem);

View File

@@ -113,7 +113,7 @@ PrimitiveTypePointer PrimitiveType::parseAndFind(Context &context, Domain &domai
 const auto typeName = tokenizer.getIdentifier();
 if (typeName.empty())
-throw tokenize::TokenizerException(tokenizer.location(), "no type supplied");
+throw tokenize::TokenizerException(tokenizer, "no type supplied");
 const auto match = std::find_if(types.cbegin(), types.cend(),
 [&](const auto &primitiveType)
@@ -126,11 +126,11 @@ PrimitiveTypePointer PrimitiveType::parseAndFind(Context &context, Domain &domai
 // Only "object" is allowed as an implicit type
 if (typeName == "object" || typeName == "objects")
 {
-context.logger.log(output::Priority::Warning, tokenizer.location(), "primitive type “" + typeName + "” should be declared");
+context.logger.log(output::Priority::Warning, tokenizer, "primitive type “" + typeName + "” should be declared");
 types.emplace_back(PrimitiveTypePointer(new PrimitiveType(typeName)));
 }
 else
-throw tokenize::TokenizerException(tokenizer.location(), "type “" + typeName + "” used but never declared");
+throw tokenize::TokenizerException(tokenizer, "type “" + typeName + "” used but never declared");
 return types.back().get();
 }

View File

@@ -25,7 +25,7 @@ UnsupportedPointer Unsupported::parse(Context &context)
 expression->m_type = tokenizer.getIdentifier();
-context.logger.log(output::Priority::Warning, tokenizer.location(), "expression type “" + expression->m_type + "” currently unsupported in this context");
+context.logger.log(output::Priority::Warning, tokenizer, "expression type “" + expression->m_type + "” currently unsupported in this context");
 skipSection(tokenizer);

View File

@@ -61,7 +61,7 @@ void Variable::parseDeclaration(Context &context, Variables &parameters)
 });
 if (match != parameters.cend())
-throw tokenize::TokenizerException(tokenizer.location(), "variable “" + variable->m_name + "” already declared in this scope");
+throw tokenize::TokenizerException(tokenizer, "variable “" + variable->m_name + "” already declared in this scope");
 // Flag variable for potentially upcoming type declaration
 variable->setDirty();
@@ -138,7 +138,7 @@ void Variable::parseTypedDeclarations(Context &context, ExpressionContext &expre
 expressionContext.checkRequirement(Requirement::Type::Typing);
 // If no types are given, check that typing is not a requirement
 else if (expressionContext.hasRequirement(Requirement::Type::Typing))
-throw tokenize::TokenizerException(tokenizer.location(), "variable has undeclared type");
+throw tokenize::TokenizerException(tokenizer, "variable has undeclared type");
 }
 ////////////////////////////////////////////////////////////////////////////////////////////////////

View File

@@ -173,7 +173,7 @@ void Description::parseContent(tokenize::Tokenizer<> &tokenizer)
 tokenizer.skipWhiteSpace();
 if (!tokenizer.atEnd())
-throw tokenize::TokenizerException(tokenizer.location(), "expected end of SAS description (perhaps, input contains two SAS descriptions?)");
+throw tokenize::TokenizerException(tokenizer, "expected end of SAS description (perhaps, input contains two SAS descriptions?)");
 }
 ////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -185,7 +185,7 @@ void Description::parseVersionSection(tokenize::Tokenizer<> &tokenizer) const
 const auto formatVersion = tokenizer.get<size_t>();
 if (formatVersion != 3)
-throw tokenize::TokenizerException(tokenizer.location(), "unsupported SAS format version (" + std::to_string(formatVersion) + ")");
+throw tokenize::TokenizerException(tokenizer, "unsupported SAS format version (" + std::to_string(formatVersion) + ")");
 tokenizer.expect<std::string>("end_version");
 }

View File

@@ -29,7 +29,7 @@ MutexGroup MutexGroup::fromSAS(tokenize::Tokenizer<> &tokenizer, const Variables
 mutexGroup.m_facts.emplace_back(Fact::fromSAS(tokenizer, variables));
 if (mutexGroup.m_facts[j].value() == Value::None)
-throw tokenize::TokenizerException(tokenizer.location(), "mutex groups must not contain <none of those> values");
+throw tokenize::TokenizerException(tokenizer, "mutex groups must not contain <none of those> values");
 }
 tokenizer.expect<std::string>("end_mutex_group");

View File

@@ -44,7 +44,7 @@ Predicate Predicate::fromSAS(tokenize::Tokenizer<> &tokenizer)
 }
 catch (const std::exception &e)
 {
-throw tokenize::TokenizerException(tokenizer.location(), "could not parse operator predicate");
+throw tokenize::TokenizerException(tokenizer, "could not parse operator predicate");
 }
 return predicate;

View File

@@ -75,7 +75,7 @@ Value Value::fromSAS(tokenize::Tokenizer<> &tokenizer)
 else if (sasSign == "NegatedAtom")
 value.m_sign = Value::Sign::Negative;
 else
-throw tokenize::TokenizerException(tokenizer.location(), "invalid value sign “" + sasSign + "”");
+throw tokenize::TokenizerException(tokenizer, "invalid value sign “" + sasSign + "”");
 try
 {
@@ -91,7 +91,7 @@ Value Value::fromSAS(tokenize::Tokenizer<> &tokenizer)
 }
 catch (const std::exception &e)
 {
-throw tokenize::TokenizerException(tokenizer.location(), std::string("could not parse variable value (") + e.what() + ")");
+throw tokenize::TokenizerException(tokenizer, std::string("could not parse variable value (") + e.what() + ")");
 }
 return value;
@@ -107,7 +107,7 @@ const Value &Value::referenceFromSAS(tokenize::Tokenizer<> &tokenizer, const Var
 return Value::Any;
 if (valueID < 0 || static_cast<size_t>(valueID) >= variable.values().size())
-throw tokenize::TokenizerException(tokenizer.location(), "value index out of range (variable " + variable.name() + ", index " + std::to_string(valueID) + ")");
+throw tokenize::TokenizerException(tokenizer, "value index out of range (variable " + variable.name() + ", index " + std::to_string(valueID) + ")");
 return variable.values()[valueID];
 }

View File

@@ -43,7 +43,7 @@ Variable Variable::fromSAS(tokenize::Tokenizer<> &tokenizer)
 // <none of those> values are only allowed at the end
 if (j < numberOfValues - 1 && variable.m_values[j] == Value::None)
-throw tokenize::TokenizerException(tokenizer.location(), "<none of those> value must be the last value of a variable");
+throw tokenize::TokenizerException(tokenizer, "<none of those> value must be the last value of a variable");
 }
 tokenizer.expect<std::string>("end_variable");
@@ -66,7 +66,7 @@ const Variable &Variable::referenceFromSAS(tokenize::Tokenizer<> &tokenizer, con
 const auto variableID = tokenizer.get<size_t>();
 if (variableID >= variables.size())
-throw tokenize::TokenizerException(tokenizer.location(), "variable index out of range (index " + std::to_string(variableID) + ")");
+throw tokenize::TokenizerException(tokenizer, "variable index out of range (index " + std::to_string(variableID) + ")");
 return variables[variableID];
 }