♻️ implemented a non-recursive parser

This commit is contained in:
Niels Lohmann 2018-03-17 14:46:50 +01:00
parent 27cf05af8d
commit c87ffad45c
No known key found for this signature in database
GPG key ID: 7F3CEA63AE251B69
2 changed files with 526 additions and 298 deletions
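The diff below removes the recursive call to sax_parse_internal and drives parsing from a single loop: one switch parses a value, an explicit std::vector<parse_state_t> records whether each enclosing container is an array or an object, and a skip_to_state_evaluation flag re-enters the loop at the state evaluation after a container has been closed. The following is a minimal, self-contained sketch of that control flow, reduced to a toy structure checker; accept, next, and the single-character token alphabet are illustrative assumptions, not the library's SAX interface.

#include <cassert>
#include <cstddef>
#include <string>
#include <vector>

// Toy structure checker over a pre-tokenized input:
// '{' '}' begin/end object, '[' ']' begin/end array, 'v' a scalar value,
// ',' a value separator. Keys, strings, and error reporting are omitted.
enum class parse_state_t { array_value, object_value };

bool accept(const std::string& input)
{
    std::size_t pos = 0;
    auto next = [&]() -> char { return pos < input.size() ? input[pos++] : '\0'; };

    std::vector<parse_state_t> states;   // stack of enclosing containers
    char token = next();
    bool skip_to_state_evaluation = false;

    while (true)
    {
        if (not skip_to_state_evaluation)
        {
            // parse a single value
            switch (token)
            {
                case '{':
                    token = next();
                    if (token == '}') { break; }   // empty object -> value done
                    states.push_back(parse_state_t::object_value);
                    continue;                      // parse the first member
                case '[':
                    token = next();
                    if (token == ']') { break; }   // empty array -> value done
                    states.push_back(parse_state_t::array_value);
                    continue;                      // parse the first element
                case 'v':
                    break;                         // scalar value done
                default:
                    return false;                  // unexpected token
            }
        }
        else
        {
            skip_to_state_evaluation = false;
        }

        // a value was parsed; ask the enclosing container what may follow
        if (states.empty())
        {
            return next() == '\0';                 // done once input is consumed
        }

        token = next();
        const char closer = (states.back() == parse_state_t::array_value) ? ']' : '}';
        if (token == ',')
        {
            token = next();                        // next element/member
            continue;
        }
        if (token == closer)
        {
            states.pop_back();                     // container finished
            skip_to_state_evaluation = true;       // evaluate its parent next
            continue;
        }
        return false;                              // neither ',' nor the matching closer
    }
}

int main()
{
    assert(accept("v"));
    assert(accept("[v,{v,[v]},v]"));
    assert(not accept("[v,v"));
    assert(not accept("{v]"));
}

One consequence of this shape is that nesting depth now grows the heap-allocated states vector instead of the call stack.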

View file

@@ -431,6 +431,18 @@ class parser
 bool sax_parse_internal(json_sax_t* sax)
 {
+    // two values for the structured values
+    enum class parse_state_t { array_value, object_value };
+    // stack to remember the hierarchy of structured values we are parsing
+    std::vector<parse_state_t> states;
+    // value to avoid a goto (see comment where set to true)
+    bool skip_to_state_evaluation = false;
+
+    while (true)
+    {
+        if (not skip_to_state_evaluation)
+        {
+            // invariant: get_token() was called before each iteration
             switch (last_token)
             {
                 case token_type::begin_object:
@@ -444,14 +456,15 @@ class parser
                     get_token();
                     // closing } -> we are done
-                    if (JSON_UNLIKELY(last_token == token_type::end_object))
+                    if (last_token == token_type::end_object)
                     {
-                        return sax->end_object();
+                        if (not sax->end_object())
+                        {
+                            return false;
+                        }
+                        break;
                     }
-                    // parse values
-                    while (true)
-                    {
                     // parse key
                     if (JSON_UNLIKELY(last_token != token_type::value_string))
                     {
@@ -476,36 +489,14 @@ class parser
                                                 parse_error::create(101, m_lexer.get_position(), exception_message(token_type::name_separator)));
                     }
-                        // parse value
-                        get_token();
-                        if (not sax_parse_internal(sax))
-                        {
-                            return false;
-                        }
-                        // comma -> next value
-                        get_token();
-                        if (last_token == token_type::value_separator)
-                        {
-                            get_token();
-                            continue;
-                        }
-                        // closing }
-                        if (JSON_LIKELY(last_token == token_type::end_object))
-                        {
-                            return sax->end_object();
-                        }
-                        else
-                        {
-                            return sax->parse_error(m_lexer.get_position(),
-                                                    m_lexer.get_token_string(),
-                                                    parse_error::create(101, m_lexer.get_position(), exception_message(token_type::end_object)));
-                        }
-                    }
-                }
+                    // remember we are now inside an object
+                    states.push_back(parse_state_t::object_value);
+                    // parse values
+                    get_token();
+                    continue;
+                }
                 case token_type::begin_array:
                 {
                     if (not sax->start_array())
@@ -519,40 +510,20 @@ class parser
                     // closing ] -> we are done
                     if (last_token == token_type::end_array)
                     {
-                        return sax->end_array();
-                    }
-                    // parse values
-                    while (true)
-                    {
-                        // parse value
-                        if (not sax_parse_internal(sax))
-                        {
-                            return false;
-                        }
-                        // comma -> next value
-                        get_token();
-                        if (last_token == token_type::value_separator)
-                        {
-                            get_token();
-                            continue;
-                        }
-                        // closing ]
-                        if (JSON_LIKELY(last_token == token_type::end_array))
-                        {
-                            return sax->end_array();
-                        }
-                        else
-                        {
-                            return sax->parse_error(m_lexer.get_position(),
-                                                    m_lexer.get_token_string(),
-                                                    parse_error::create(101, m_lexer.get_position(), exception_message(token_type::end_array)));
-                        }
-                    }
-                }
+                        if (not sax->end_array())
+                        {
+                            return false;
+                        }
+                        break;
+                    }
+                    // remember we are now inside an array
+                    states.push_back(parse_state_t::array_value);
+                    // parse values (no need to call get_token)
+                    continue;
+                }
                 case token_type::value_float:
                 {
                     const auto res = m_lexer.get_number_float();
@@ -565,38 +536,66 @@ class parser
                     }
                     else
                     {
-                        return sax->number_float(res, m_lexer.move_string());
+                        if (not sax->number_float(res, m_lexer.move_string()))
+                        {
+                            return false;
+                        }
+                        break;
                     }
                 }
                 case token_type::literal_false:
                 {
-                    return sax->boolean(false);
+                    if (not sax->boolean(false))
+                    {
+                        return false;
+                    }
+                    break;
                 }
                 case token_type::literal_null:
                 {
-                    return sax->null();
+                    if (not sax->null())
+                    {
+                        return false;
+                    }
+                    break;
                 }
                 case token_type::literal_true:
                 {
-                    return sax->boolean(true);
+                    if (not sax->boolean(true))
+                    {
+                        return false;
+                    }
+                    break;
                 }
                 case token_type::value_integer:
                 {
-                    return sax->number_integer(m_lexer.get_number_integer());
+                    if (not sax->number_integer(m_lexer.get_number_integer()))
+                    {
+                        return false;
+                    }
+                    break;
                 }
                 case token_type::value_string:
                 {
-                    return sax->string(m_lexer.move_string());
+                    if (not sax->string(m_lexer.move_string()))
+                    {
+                        return false;
+                    }
+                    break;
                 }
                 case token_type::value_unsigned:
                 {
-                    return sax->number_unsigned(m_lexer.get_number_unsigned());
+                    if (not sax->number_unsigned(m_lexer.get_number_unsigned()))
+                    {
+                        return false;
+                    }
+                    break;
                 }
                 case token_type::parse_error:
@@ -615,6 +614,123 @@ class parser
                 }
             }
         }
+        else
+        {
+            skip_to_state_evaluation = false;
+        }
+
+        // we reached this line after we successfully parsed a value
+        if (states.empty())
+        {
+            // empty stack: we reached the end of the hierarchy: done
+            return true;
+        }
+        else
+        {
+            get_token();
+            switch (states.back())
+            {
+                case parse_state_t::array_value:
+                {
+                    // comma -> next value
+                    if (last_token == token_type::value_separator)
+                    {
+                        // parse a new value
+                        get_token();
+                        continue;
+                    }
+                    // closing ]
+                    if (JSON_LIKELY(last_token == token_type::end_array))
+                    {
+                        if (not sax->end_array())
+                        {
+                            return false;
+                        }
+                        // We are done with this array. Before we can parse
+                        // a new value, we need to evaluate the new state
+                        // first. By setting skip_to_state_evaluation to
+                        // true, we are effectively jumping to the
+                        // beginning of this switch.
+                        assert(not states.empty());
+                        states.pop_back();
+                        skip_to_state_evaluation = true;
+                        continue;
+                    }
+                    else
+                    {
+                        return sax->parse_error(m_lexer.get_position(),
+                                                m_lexer.get_token_string(),
+                                                parse_error::create(101, m_lexer.get_position(), exception_message(token_type::end_array)));
+                    }
+                }
+                case parse_state_t::object_value:
+                {
+                    // comma -> next value
+                    if (last_token == token_type::value_separator)
+                    {
+                        get_token();
+                        // parse key
+                        if (JSON_UNLIKELY(last_token != token_type::value_string))
+                        {
+                            return sax->parse_error(m_lexer.get_position(),
+                                                    m_lexer.get_token_string(),
+                                                    parse_error::create(101, m_lexer.get_position(), exception_message(token_type::value_string)));
+                        }
+                        else
+                        {
+                            if (not sax->key(m_lexer.move_string()))
+                            {
+                                return false;
+                            }
+                        }
+                        // parse separator (:)
+                        get_token();
+                        if (JSON_UNLIKELY(last_token != token_type::name_separator))
+                        {
+                            return sax->parse_error(m_lexer.get_position(),
+                                                    m_lexer.get_token_string(),
+                                                    parse_error::create(101, m_lexer.get_position(), exception_message(token_type::name_separator)));
+                        }
+                        // parse values
+                        get_token();
+                        continue;
+                    }
+                    // closing }
+                    if (JSON_LIKELY(last_token == token_type::end_object))
+                    {
+                        if (not sax->end_object())
+                        {
+                            return false;
+                        }
+                        // We are done with this object. Before we can
+                        // parse a new value, we need to evaluate the new
+                        // state first. By setting skip_to_state_evaluation
+                        // to true, we are effectively jumping to the
+                        // beginning of this switch.
+                        assert(not states.empty());
+                        states.pop_back();
+                        skip_to_state_evaluation = true;
+                        continue;
+                    }
+                    else
+                    {
+                        return sax->parse_error(m_lexer.get_position(),
+                                                m_lexer.get_token_string(),
+                                                parse_error::create(101, m_lexer.get_position(), exception_message(token_type::end_object)));
+                    }
+                }
+            }
+        }
+    }
+}

 /// get next token from lexer
 token_type get_token()

View file

@@ -3941,6 +3941,18 @@ class parser
 bool sax_parse_internal(json_sax_t* sax)
 {
+    // two values for the structured values
+    enum class parse_state_t { array_value, object_value };
+    // stack to remember the hierarchy of structured values we are parsing
+    std::vector<parse_state_t> states;
+    // value to avoid a goto (see comment where set to true)
+    bool skip_to_tail = false;
+
+    while (true)
+    {
+        if (not skip_to_tail)
+        {
+            // invariant: get_token() was called before each iteration
             switch (last_token)
             {
                 case token_type::begin_object:
@@ -3954,14 +3966,15 @@ class parser
                     get_token();
                     // closing } -> we are done
-                    if (JSON_UNLIKELY(last_token == token_type::end_object))
+                    if (last_token == token_type::end_object)
                     {
-                        return sax->end_object();
+                        if (not sax->end_object())
+                        {
+                            return false;
+                        }
+                        break;
                     }
-                    // parse values
-                    while (true)
-                    {
                     // parse key
                     if (JSON_UNLIKELY(last_token != token_type::value_string))
                     {
@@ -3986,36 +3999,12 @@ class parser
                                                 parse_error::create(101, m_lexer.get_position(), exception_message(token_type::name_separator)));
                     }
-                        // parse value
-                        get_token();
-                        if (not sax_parse_internal(sax))
-                        {
-                            return false;
-                        }
-                        // comma -> next value
-                        get_token();
-                        if (last_token == token_type::value_separator)
-                        {
-                            get_token();
-                            continue;
-                        }
-                        // closing }
-                        if (JSON_LIKELY(last_token == token_type::end_object))
-                        {
-                            return sax->end_object();
-                        }
-                        else
-                        {
-                            return sax->parse_error(m_lexer.get_position(),
-                                                    m_lexer.get_token_string(),
-                                                    parse_error::create(101, m_lexer.get_position(), exception_message(token_type::end_object)));
-                        }
-                    }
-                }
+                    // parse values
+                    get_token();
+                    states.push_back(parse_state_t::object_value);
+                    continue;
+                }
                 case token_type::begin_array:
                 {
                     if (not sax->start_array())
@@ -4029,40 +4018,18 @@ class parser
                     // closing ] -> we are done
                     if (last_token == token_type::end_array)
                     {
-                        return sax->end_array();
-                    }
-                    // parse values
-                    while (true)
-                    {
-                        // parse value
-                        if (not sax_parse_internal(sax))
-                        {
-                            return false;
-                        }
-                        // comma -> next value
-                        get_token();
-                        if (last_token == token_type::value_separator)
-                        {
-                            get_token();
-                            continue;
-                        }
-                        // closing ]
-                        if (JSON_LIKELY(last_token == token_type::end_array))
-                        {
-                            return sax->end_array();
-                        }
-                        else
-                        {
-                            return sax->parse_error(m_lexer.get_position(),
-                                                    m_lexer.get_token_string(),
-                                                    parse_error::create(101, m_lexer.get_position(), exception_message(token_type::end_array)));
-                        }
-                    }
-                }
+                        if (not sax->end_array())
+                        {
+                            return false;
+                        }
+                        break;
+                    }
+                    // parse values (no need to call get_token)
+                    states.push_back(parse_state_t::array_value);
+                    continue;
+                }
                 case token_type::value_float:
                 {
                     const auto res = m_lexer.get_number_float();
@@ -4075,38 +4042,66 @@ class parser
                     }
                     else
                     {
-                        return sax->number_float(res, m_lexer.move_string());
+                        if (not sax->number_float(res, m_lexer.move_string()))
+                        {
+                            return false;
+                        }
+                        break;
                     }
                 }
                 case token_type::literal_false:
                 {
-                    return sax->boolean(false);
+                    if (not sax->boolean(false))
+                    {
+                        return false;
+                    }
+                    break;
                 }
                 case token_type::literal_null:
                 {
-                    return sax->null();
+                    if (not sax->null())
+                    {
+                        return false;
+                    }
+                    break;
                 }
                 case token_type::literal_true:
                 {
-                    return sax->boolean(true);
+                    if (not sax->boolean(true))
+                    {
+                        return false;
+                    }
+                    break;
                 }
                 case token_type::value_integer:
                 {
-                    return sax->number_integer(m_lexer.get_number_integer());
+                    if (not sax->number_integer(m_lexer.get_number_integer()))
+                    {
+                        return false;
+                    }
+                    break;
                 }
                 case token_type::value_string:
                 {
-                    return sax->string(m_lexer.move_string());
+                    if (not sax->string(m_lexer.move_string()))
+                    {
+                        return false;
+                    }
+                    break;
                 }
                 case token_type::value_unsigned:
                 {
-                    return sax->number_unsigned(m_lexer.get_number_unsigned());
+                    if (not sax->number_unsigned(m_lexer.get_number_unsigned()))
+                    {
+                        return false;
+                    }
+                    break;
                 }
                 case token_type::parse_error:
@@ -4125,6 +4120,123 @@ class parser
                 }
             }
         }
+        else
+        {
+            skip_to_tail = false;
+        }
+
+        // we reached this line after we successfully parsed a value
+        if (states.empty())
+        {
+            // empty stack: we reached the end of the hierarchy: done
+            return true;
+        }
+        else
+        {
+            get_token();
+            switch (states.back())
+            {
+                case parse_state_t::array_value:
+                {
+                    // comma -> next value
+                    if (last_token == token_type::value_separator)
+                    {
+                        // parse a new value
+                        get_token();
+                        continue;
+                    }
+                    // closing ]
+                    if (JSON_LIKELY(last_token == token_type::end_array))
+                    {
+                        if (not sax->end_array())
+                        {
+                            return false;
+                        }
+                        // We are done with this array. Before we can parse
+                        // a new value, we need to evaluate the new state
+                        // first. By setting skip_to_tail to true, we are
+                        // effectively jumping to the beginning of this
+                        // switch.
+                        assert(not states.empty());
+                        states.pop_back();
+                        skip_to_tail = true;
+                        continue;
+                    }
+                    else
+                    {
+                        return sax->parse_error(m_lexer.get_position(),
+                                                m_lexer.get_token_string(),
+                                                parse_error::create(101, m_lexer.get_position(), exception_message(token_type::end_array)));
+                    }
+                }
+                case parse_state_t::object_value:
+                {
+                    // comma -> next value
+                    if (last_token == token_type::value_separator)
+                    {
+                        get_token();
+                        // parse key
+                        if (JSON_UNLIKELY(last_token != token_type::value_string))
+                        {
+                            return sax->parse_error(m_lexer.get_position(),
+                                                    m_lexer.get_token_string(),
+                                                    parse_error::create(101, m_lexer.get_position(), exception_message(token_type::value_string)));
+                        }
+                        else
+                        {
+                            if (not sax->key(m_lexer.move_string()))
+                            {
+                                return false;
+                            }
+                        }
+                        // parse separator (:)
+                        get_token();
+                        if (JSON_UNLIKELY(last_token != token_type::name_separator))
+                        {
+                            return sax->parse_error(m_lexer.get_position(),
+                                                    m_lexer.get_token_string(),
+                                                    parse_error::create(101, m_lexer.get_position(), exception_message(token_type::name_separator)));
+                        }
+                        // parse values
+                        get_token();
+                        continue;
+                    }
+                    // closing }
+                    if (JSON_LIKELY(last_token == token_type::end_object))
+                    {
+                        if (not sax->end_object())
+                        {
+                            return false;
+                        }
+                        // We are done with this object. Before we can
+                        // parse a new value, we need to evaluate the new
+                        // state first. By setting skip_to_tail to true,
+                        // we are effectively jumping to the beginning of
+                        // this switch.
+                        assert(not states.empty());
+                        states.pop_back();
+                        skip_to_tail = true;
+                        continue;
+                    }
+                    else
+                    {
+                        return sax->parse_error(m_lexer.get_position(),
+                                                m_lexer.get_token_string(),
+                                                parse_error::create(101, m_lexer.get_position(), exception_message(token_type::end_object)));
+                    }
+                }
+            }
+        }
+    }
+}

 /// get next token from lexer
 token_type get_token()