🎨 cleanup

parent 37a72dac48, commit b224c52376
4 changed files with 153 additions and 169 deletions

Makefile (2 changed lines)
@@ -442,7 +442,7 @@ fuzzing-stop:
 
 # call cppcheck on the main header file
 cppcheck:
-	cppcheck --enable=warning --inconclusive --force --std=c++11 $(SRCS) --error-exitcode=1
+	cppcheck --enable=warning --inline-suppr --inconclusive --force --std=c++11 $(SRCS) --error-exitcode=1
 
 # compile and check with Clang Static Analyzer
 clang_analyze:
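The new --inline-suppr flag tells cppcheck to honor suppression comments placed directly above a flagged line; the accessMoved suppression added to push_back further down in this commit relies on it. A generic sketch of the mechanism, not code from this repository (the function is made up for illustration):

    #include <string>
    #include <utility>

    // cppcheck would normally report accessMoved for the line touching the
    // moved-from string; with --inline-suppr the comment silences only that
    // one finding instead of disabling the check globally.
    std::string take(std::string&& s)
    {
        std::string out = std::move(s);
        // cppcheck-suppress accessMoved
        s.clear();  // intentional: reset the moved-from string for reuse
        return out;
    }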
@@ -262,14 +262,13 @@ class parser
                             m_lexer.get_token_string(),
                             out_of_range::create(406, "number overflow parsing '" + m_lexer.get_token_string() + "'"));
                     }
-                    else
-                    {
-                        if (JSON_UNLIKELY(not sax->number_float(res, m_lexer.get_string())))
-                        {
-                            return false;
-                        }
-                        break;
-                    }
+
+                    if (JSON_UNLIKELY(not sax->number_float(res, m_lexer.get_string())))
+                    {
+                        return false;
+                    }
+
+                    break;
                 }
 
                 case token_type::literal_false:
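The hunk above is purely structural: the branch that handles a non-finite number always returns, so the else block around the sax->number_float check (and the extra level of indentation it forced) never affected behavior. A minimal sketch of the pattern, with emit() as a hypothetical stand-in for sax->number_float():

    #include <cmath>

    static bool emit(double /*value*/) { return true; }  // stand-in callback

    static bool parse_number(double res)
    {
        if (!std::isfinite(res))
        {
            return false;  // the error path always leaves the function ...
        }

        // ... so this check no longer needs to sit inside an else block
        if (!emit(res))
        {
            return false;
        }
        return true;
    }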
This hunk drops the else blocks that followed a return or a continue in the parser loop; the surviving lines only lose one level of indentation, so unchanged lines are shown below at their new indentation and only content changes are marked:

@@ -355,103 +354,95 @@ class parser
             // empty stack: we reached the end of the hierarchy: done
             return true;
         }
-        else
-        {
+
         if (states.back()) // array
         {
             // comma -> next value
             if (get_token() == token_type::value_separator)
             {
                 // parse a new value
                 get_token();
                 continue;
             }
 
             // closing ]
             if (JSON_LIKELY(last_token == token_type::end_array))
             {
                 if (JSON_UNLIKELY(not sax->end_array()))
                 {
                     return false;
                 }
 
                 // We are done with this array. Before we can parse a
                 // new value, we need to evaluate the new state first.
                 // By setting skip_to_state_evaluation to false, we
                 // are effectively jumping to the beginning of this if.
                 assert(not states.empty());
                 states.pop_back();
                 skip_to_state_evaluation = true;
                 continue;
             }
-            else
-            {
+
             return sax->parse_error(m_lexer.get_position(),
                                     m_lexer.get_token_string(),
                                     parse_error::create(101, m_lexer.get_position(),
                                             exception_message(token_type::end_array, "array")));
-            }
         }
         else // object
         {
             // comma -> next value
             if (get_token() == token_type::value_separator)
             {
                 // parse key
                 if (JSON_UNLIKELY(get_token() != token_type::value_string))
                 {
                     return sax->parse_error(m_lexer.get_position(),
                                             m_lexer.get_token_string(),
                                             parse_error::create(101, m_lexer.get_position(),
                                                     exception_message(token_type::value_string, "object key")));
                 }
-                else
-                {
+
                 if (JSON_UNLIKELY(not sax->key(m_lexer.get_string())))
                 {
                     return false;
                 }
-                }
 
                 // parse separator (:)
                 if (JSON_UNLIKELY(get_token() != token_type::name_separator))
                 {
                     return sax->parse_error(m_lexer.get_position(),
                                             m_lexer.get_token_string(),
                                             parse_error::create(101, m_lexer.get_position(),
                                                     exception_message(token_type::name_separator, "object separator")));
                 }
 
                 // parse values
                 get_token();
                 continue;
             }
 
             // closing }
             if (JSON_LIKELY(last_token == token_type::end_object))
             {
                 if (JSON_UNLIKELY(not sax->end_object()))
                 {
                     return false;
                 }
 
                 // We are done with this object. Before we can parse a
                 // new value, we need to evaluate the new state first.
                 // By setting skip_to_state_evaluation to false, we
                 // are effectively jumping to the beginning of this if.
                 assert(not states.empty());
                 states.pop_back();
                 skip_to_state_evaluation = true;
                 continue;
             }
-            else
-            {
+
             return sax->parse_error(m_lexer.get_position(),
                                     m_lexer.get_token_string(),
                                     parse_error::create(101, m_lexer.get_position(),
                                             exception_message(token_type::end_object, "object")));
-            }
         }
-        }
     }
 }
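The hunk above applies the same idea to the loop that closes arrays and objects: every removed else followed a return or a continue, so the control flow is unchanged. What it leaves intact is the bookkeeping the loop depends on: a stack of booleans recording whether the enclosing container is an array (true) or an object (false), and a flag that re-runs the "what encloses me now?" check after a container is popped. A simplified sketch of that bookkeeping, not the library's actual parser:

    #include <vector>

    // Tracks open containers the way the parser's `states` vector does:
    // true = array, false = object.
    class container_tracker
    {
      public:
        void open_array()  { states.push_back(true); }
        void open_object() { states.push_back(false); }

        // Called on ']' or '}'. Returns false if the closer does not match
        // the container on top of the stack.
        bool close(bool closing_array)
        {
            if (states.empty() || states.back() != closing_array)
            {
                return false;
            }
            states.pop_back();
            // The real parser sets skip_to_state_evaluation = true here and
            // continues its loop, so the next iteration inspects the new
            // states.back() instead of reading another value first.
            return true;
        }

        bool done() const { return states.empty(); }

      private:
        std::vector<bool> states;
    };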
@@ -4777,7 +4777,8 @@ class basic_json
 
         // add element to array (move semantics)
         m_value.array->push_back(std::move(val));
-        // invalidate object
+        // invalidate object: mark it null so we do not call the destructor
+        // cppcheck-suppress accessMoved
         val.m_type = value_t::null;
     }
 
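The reworded comment spells out why the assignment after the move is deliberate: basic_json manages its storage by hand, so once the payload has been moved into the array the source object must be marked null to keep its destructor from freeing memory the array now owns. cppcheck's accessMoved check flags exactly that write, hence the inline suppression enabled in the Makefile. A simplified illustration with an ordinary struct standing in for basic_json:

    #include <string>
    #include <utility>
    #include <vector>

    struct value
    {
        bool is_null = true;
        std::string payload;  // stands in for the storage basic_json manages by hand
    };

    void append(std::vector<value>& arr, value&& val)
    {
        // add element to array (move semantics)
        arr.push_back(std::move(val));

        // deliberately touch the moved-from object so later cleanup is a no-op;
        // this is the access cppcheck would otherwise report
        // cppcheck-suppress accessMoved
        val.is_null = true;
    }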
@@ -7276,14 +7276,13 @@ class parser
(identical to the @@ -262,14 +262,13 @@ hunk above, repeated at a different file offset)

@@ -7369,103 +7368,95 @@ class parser
(identical to the @@ -355,103 +354,95 @@ hunk above, repeated at a different file offset)

@@ -17420,7 +17411,8 @@ class basic_json
(identical to the @@ -4777,7 +4777,8 @@ hunk above, repeated at a different file offset)