🐛 fix lexer to properly cope with repeated comments #2330
commit 3888b1642a (parent 1da931730a)
4 changed files with 12 additions and 2 deletions
@@ -1511,7 +1511,7 @@ scan_number_done:
         skip_whitespace();
 
         // ignore comments
-        if (ignore_comments && current == '/')
+        while (ignore_comments && current == '/')
         {
             if (!scan_comment())
             {
@@ -7390,7 +7390,7 @@ scan_number_done:
         skip_whitespace();
 
         // ignore comments
-        if (ignore_comments && current == '/')
+        while (ignore_comments && current == '/')
        {
             if (!scan_comment())
             {
@@ -241,5 +241,8 @@ TEST_CASE("lexer class")
         CHECK((scan_string("/* true */", true) == json::lexer::token_type::end_of_input));
         CHECK((scan_string("/*/**/", true) == json::lexer::token_type::end_of_input));
         CHECK((scan_string("/*/* */", true) == json::lexer::token_type::end_of_input));
+
+        CHECK((scan_string("//\n//\n", true) == json::lexer::token_type::end_of_input));
+        CHECK((scan_string("/**//**//**/", true) == json::lexer::token_type::end_of_input));
     }
 }
@@ -478,4 +478,11 @@ TEST_CASE("regression tests 2")
         CHECK(jsonObj["aaaa"] == 11);
         CHECK(jsonObj["bbb"] == 222);
     }
+
+    SECTION("issue #2330 - ignore_comment=true fails on multiple consecutive lines starting with comments")
+    {
+        std::string ss = "//\n//\n{\n}\n";
+        json j = json::parse(ss, nullptr, true, true);
+        CHECK(j.dump() == "{}");
+    }
 }
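The new regression test drives the fix through the public API. For reference, the same call as a small standalone program, mirroring the test above; it assumes the library is available as <nlohmann/json.hpp>, and the fourth argument of json::parse enables comment skipping:

    #include <iostream>
    #include <string>
    #include <nlohmann/json.hpp>

    using json = nlohmann::json;

    int main()
    {
        // Input starting with two consecutive line comments (the case from #2330).
        const std::string text = "//\n//\n{\n}\n";

        // parse(input, callback, allow_exceptions, ignore_comments)
        const json j = json::parse(text, nullptr, true, true);

        std::cout << j.dump() << '\n';  // prints {}
        return 0;
    }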