Fixed accidentally truncated ends of files after removing comments.

Patrick Lühne 2017-06-18 18:12:02 +02:00
parent dd7fb31309
commit 04dffdb09e
Signed by: patrick
GPG Key ID: 05F3611E97A70ABF
2 changed files with 38 additions and 4 deletions


@@ -315,17 +315,39 @@ void Tokenizer<TokenizerPolicy>::removeComments(const std::string &startSequence
 	m_position = 0;
 
+	// TODO: refactor
 	while (!atEnd())
 	{
-		while (!atEnd() && !testAndSkip(startSequence))
+		bool startSequenceFound = false;
+
+		while (!atEnd())
+		{
+			if ((startSequenceFound = testAndSkip(startSequence)))
+				break;
+
 			advance();
+		}
 
-		auto startPosition = m_position - startSequence.size();
+		if (!startSequenceFound && atEnd())
+			break;
+
+		const auto startPosition = m_position - startSequence.size();
+
+		bool endSequenceFound = false;
 
-		while (!atEnd() && !testAndSkip(endSequence))
+		while (!atEnd())
+		{
+			if ((endSequenceFound = testAndSkip(endSequence)))
+				break;
+
 			advance();
+		}
 
-		auto endPosition = (removeEnd) ? m_position : m_position - endSequence.size();
+		// If the end sequence is to be removed or could not be found, remove the entire range
+		const auto endPosition =
+			(removeEnd || !endSequenceFound)
+			? m_position
+			: m_position - endSequence.size();
 
 		removeRange(startPosition, endPosition);
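
The removed lines above show how the truncation happened: the old loop searched for the start sequence until it hit the end of the input and then computed startPosition and endPosition regardless of whether a comment had actually been found, so removeRange stripped the final characters of a comment-free file. Below is a minimal, self-contained sketch of the corrected logic, reimplemented over a plain std::string purely for illustration; the free function removeComments, its signature, and the example inputs are assumptions of this sketch, not code from the commit.

#include <cassert>
#include <string>

// Hypothetical reimplementation over std::string for illustration only; the
// actual Tokenizer operates on its internal buffer via testAndSkip/advance
std::string removeComments(std::string content, const std::string &startSequence,
	const std::string &endSequence, bool removeEnd)
{
	std::size_t position = 0;

	while (true)
	{
		// Nothing is removed unless a start sequence was actually found,
		// analogous to the startSequenceFound check added in the diff above
		const auto startPosition = content.find(startSequence, position);

		if (startPosition == std::string::npos)
			break;

		const auto endPosition = content.find(endSequence, startPosition + startSequence.size());

		// If the end sequence could not be found, remove everything up to the end
		if (endPosition == std::string::npos)
		{
			content.erase(startPosition);
			break;
		}

		// Otherwise, optionally keep the end sequence itself
		const auto removeTo = removeEnd ? endPosition + endSequence.size() : endPosition;
		content.erase(startPosition, removeTo - startPosition);
		position = startPosition;
	}

	return content;
}

int main()
{
	// Input without any comment stays intact (the regression fixed here)
	assert(removeComments("test foo bar", ";", "\n", false) == "test foo bar");

	// A comment terminated by a newline is removed, keeping the newline
	assert(removeComments("test ; comment\nbar", ";", "\n", false) == "test \nbar");

	// An unterminated comment at the end of the input is removed entirely
	assert(removeComments("test ; comment", ";", "\n", false) == "test ");

	return 0;
}

The diff itself achieves the same behavior with the startSequenceFound and endSequenceFound flags, since the tokenizer consumes its input through testAndSkip and advance instead of searching a string.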


@@ -329,4 +329,16 @@ TEST_CASE("[tokenizer] Comments are correctly removed", "[tokenizer]")
 	p3.skipWhiteSpace();
 
 	REQUIRE(p3.atEnd());
+
+	// Check that if there are no comments, the end is not accidentally truncated
+	std::stringstream s4("test foo bar");
+	tokenize::Tokenizer<> p4("input", s4);
+
+	p4.removeComments(";", "\n", false);
+
+	REQUIRE_NOTHROW(p4.expect<std::string>("test"));
+	REQUIRE_NOTHROW(p4.expect<std::string>("foo"));
+	REQUIRE_NOTHROW(p4.expect<std::string>("bar"));
+
+	REQUIRE(p4.atEnd());
 }
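
A hypothetical companion test could cover the other case handled by the new endSequenceFound logic: an unterminated comment at the end of the input. It is not part of this commit; the Catch include, the tokenize/Tokenizer.h header path, and the assumption that expect skips leading whitespace (as the existing expectations on "test foo bar" suggest) are assumptions of this sketch.

// Hypothetical companion test, not part of this commit
#include <catch.hpp>

#include <sstream>
#include <string>

#include <tokenize/Tokenizer.h>

TEST_CASE("[tokenizer] Unterminated comments at the end of the input are removed", "[tokenizer]")
{
	std::stringstream s("test foo ; comment without a trailing newline");
	tokenize::Tokenizer<> p("input", s);
	p.removeComments(";", "\n", false);

	// The comment is never terminated by "\n", so it should be removed up to
	// the end of the input, leaving only the tokens before it
	REQUIRE_NOTHROW(p.expect<std::string>("test"));
	REQUIRE_NOTHROW(p.expect<std::string>("foo"));

	p.skipWhiteSpace();

	REQUIRE(p.atEnd());
}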