🔨 further cleanup
parent c2d55109c1
commit c8191c8172

5 changed files with 61 additions and 57 deletions
Makefile (2 changes)

@@ -94,7 +94,7 @@ cppcheck:
 # run clang sanitize (we are overrding the CXXFLAGS provided by travis in order to use gcc's libstdc++)
 clang_sanitize: clean
-	CXX=clang++ CXXFLAGS="-g -O2 -fsanitize=address -fsanitize=undefined -fno-omit-frame-pointer" $(MAKE)
+	CXX=clang++ CXXFLAGS="-g -O2 -fsanitize=address -fsanitize=undefined -fno-omit-frame-pointer" $(MAKE) check


 ##########################################################################

src/json.hpp (40 changes)

@@ -9444,8 +9444,8 @@ class basic_json
         literal_false,    ///< the `false` literal
         literal_null,     ///< the `null` literal
         value_string,     ///< a string -- use get_string() for actual value
-        value_unsigned_integer, ///< an unsigned integer -- use get_number() for actual value
-        value_signed_integer,   ///< a signed integer -- use get_number() for actual value
+        value_unsigned,   ///< an unsigned integer -- use get_number() for actual value
+        value_integer,    ///< a signed integer -- use get_number() for actual value
         value_float,      ///< an floating point number -- use get_number() for actual value
         begin_array,      ///< the character for array begin `[`
         begin_object,     ///< the character for object begin `{`

@@ -9598,8 +9598,8 @@ class basic_json
                     return "null literal";
                 case token_type::value_string:
                     return "string literal";
-                case lexer::token_type::value_unsigned_integer:
-                case lexer::token_type::value_signed_integer:
+                case lexer::token_type::value_unsigned:
+                case lexer::token_type::value_integer:
                 case lexer::token_type::value_float:
                     return "number literal";
                 case token_type::begin_array:

@@ -9903,7 +9903,7 @@ basic_json_parser_13:
             }
 basic_json_parser_14:
             {
-                last_token_type = token_type::value_unsigned_integer;
+                last_token_type = token_type::value_unsigned;
                 break;
             }
 basic_json_parser_15:

@@ -10301,7 +10301,7 @@ basic_json_parser_43:
             }
 basic_json_parser_44:
             {
-                last_token_type = token_type::value_signed_integer;
+                last_token_type = token_type::value_integer;
                 break;
             }
 basic_json_parser_45:

@@ -11091,19 +11091,19 @@ basic_json_parser_71:
         {
             assert(m_start != nullptr);
             assert(m_start < m_cursor);
-            assert((token == token_type::value_unsigned_integer) or
-                   (token == token_type::value_signed_integer) or
+            assert((token == token_type::value_unsigned) or
+                   (token == token_type::value_integer) or
                    (token == token_type::value_float));

-            strtonum num(reinterpret_cast<const char*>(m_start),
+            strtonum num_converter(reinterpret_cast<const char*>(m_start),
                          reinterpret_cast<const char*>(m_cursor));

             switch (token)
             {
-                case lexer::token_type::value_unsigned_integer:
+                case lexer::token_type::value_unsigned:
                 {
-                    number_unsigned_t val{0};
-                    if (num.to(val))
+                    number_unsigned_t val;
+                    if (num_converter.to(val))
                     {
                         result.m_type = value_t::number_unsigned;
                         result.m_value = val;

@@ -11112,10 +11112,10 @@ basic_json_parser_71:
                     break;
                 }

-                case lexer::token_type::value_signed_integer:
+                case lexer::token_type::value_integer:
                 {
-                    number_integer_t val{0};
-                    if (num.to(val))
+                    number_integer_t val;
+                    if (num_converter.to(val))
                     {
                         result.m_type = value_t::number_integer;
                         result.m_value = val;

@@ -11130,8 +11130,10 @@ basic_json_parser_71:
                 }
             }

-            number_float_t val{0};
-            if (not num.to(val))
+            // parse float (either explicitly or because a previous conversion
+            // failed)
+            number_float_t val;
+            if (not num_converter.to(val))
             {
                 // couldn't parse as float_t
                 result.m_type = value_t::discarded;

@@ -11390,8 +11392,8 @@ basic_json_parser_71:
                     break;
                 }

-            case lexer::token_type::value_unsigned_integer:
-            case lexer::token_type::value_signed_integer:
+            case lexer::token_type::value_unsigned:
+            case lexer::token_type::value_integer:
             case lexer::token_type::value_float:
                 {
                     m_lexer.get_number(result, last_token);

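Note (not part of the diff): the get_number() paths touched above implement the parser's numeric fallback: try an unsigned integer first, then a signed integer, then floating point, and mark the value as discarded if nothing fits. Below is a minimal standalone sketch of that idea using the standard strto* functions rather than the library's strtonum/num_converter helper; all names in it are illustrative only.

```cpp
#include <cerrno>
#include <cstdlib>
#include <iostream>
#include <string>

// Illustrative stand-in for the fallback logic: unsigned -> signed -> float,
// "discarded" if every conversion fails or leaves trailing characters.
void parse_number(const std::string& s)
{
    const char* first = s.c_str();
    char* end = nullptr;

    errno = 0;
    const unsigned long long u = std::strtoull(first, &end, 10);
    if (errno == 0 && end == first + s.size() && s.front() != '-')
    {
        std::cout << s << " -> unsigned " << u << '\n';
        return;
    }

    errno = 0;
    const long long i = std::strtoll(first, &end, 10);
    if (errno == 0 && end == first + s.size())
    {
        std::cout << s << " -> integer " << i << '\n';
        return;
    }

    // parse float (either explicitly or because a previous conversion failed)
    errno = 0;
    const double d = std::strtod(first, &end);
    if (errno == 0 && end == first + s.size())
    {
        std::cout << s << " -> float " << d << '\n';
        return;
    }

    std::cout << s << " -> discarded\n";
}

int main()
{
    parse_number("42");                    // fits an unsigned integer
    parse_number("-7");                    // needs a signed integer
    parse_number("18446744073709551616");  // 2^64: overflows both -> float
    parse_number("3.14");                  // explicitly floating point
}
```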
src/json.hpp.re2c

@@ -9444,8 +9444,8 @@ class basic_json
         literal_false,    ///< the `false` literal
         literal_null,     ///< the `null` literal
         value_string,     ///< a string -- use get_string() for actual value
-        value_unsigned_integer, ///< an unsigned integer -- use get_number() for actual value
-        value_signed_integer,   ///< a signed integer -- use get_number() for actual value
+        value_unsigned,   ///< an unsigned integer -- use get_number() for actual value
+        value_integer,    ///< a signed integer -- use get_number() for actual value
         value_float,      ///< an floating point number -- use get_number() for actual value
         begin_array,      ///< the character for array begin `[`
         begin_object,     ///< the character for object begin `{`

@@ -9598,8 +9598,8 @@ class basic_json
                     return "null literal";
                 case token_type::value_string:
                     return "string literal";
-                case lexer::token_type::value_unsigned_integer:
-                case lexer::token_type::value_signed_integer:
+                case lexer::token_type::value_unsigned:
+                case lexer::token_type::value_integer:
                 case lexer::token_type::value_float:
                     return "number literal";
                 case token_type::begin_array:

@@ -9699,9 +9699,9 @@ class basic_json
             frac      = decimal_point digit+;
             int       = (zero | digit_1_9 digit*);
             number_unsigned = int;
-            number_unsigned { last_token_type = token_type::value_unsigned_integer; break; }
-            number_signed = minus int;
-            number_signed { last_token_type = token_type::value_signed_integer; break; }
+            number_unsigned { last_token_type = token_type::value_unsigned; break; }
+            number_integer = minus int;
+            number_integer { last_token_type = token_type::value_integer; break; }
             number_float = minus? int frac? exp?;
             number_float { last_token_type = token_type::value_float; break; }

@@ -10163,19 +10163,19 @@ class basic_json
         {
             assert(m_start != nullptr);
             assert(m_start < m_cursor);
-            assert((token == token_type::value_unsigned_integer) or
-                   (token == token_type::value_signed_integer) or
+            assert((token == token_type::value_unsigned) or
+                   (token == token_type::value_integer) or
                    (token == token_type::value_float));

-            strtonum num(reinterpret_cast<const char*>(m_start),
+            strtonum num_converter(reinterpret_cast<const char*>(m_start),
                          reinterpret_cast<const char*>(m_cursor));

             switch (token)
             {
-                case lexer::token_type::value_unsigned_integer:
+                case lexer::token_type::value_unsigned:
                 {
-                    number_unsigned_t val{0};
-                    if (num.to(val))
+                    number_unsigned_t val;
+                    if (num_converter.to(val))
                     {
                         result.m_type = value_t::number_unsigned;
                         result.m_value = val;

@@ -10184,10 +10184,10 @@ class basic_json
                     break;
                 }

-                case lexer::token_type::value_signed_integer:
+                case lexer::token_type::value_integer:
                 {
-                    number_integer_t val{0};
-                    if (num.to(val))
+                    number_integer_t val;
+                    if (num_converter.to(val))
                     {
                         result.m_type = value_t::number_integer;
                         result.m_value = val;

@@ -10202,8 +10202,10 @@ class basic_json
                 }
             }

-            number_float_t val{0};
-            if (not num.to(val))
+            // parse float (either explicitly or because a previous conversion
+            // failed)
+            number_float_t val;
+            if (not num_converter.to(val))
             {
                 // couldn't parse as float_t
                 result.m_type = value_t::discarded;

@@ -10462,8 +10464,8 @@ class basic_json
                     break;
                 }

-            case lexer::token_type::value_unsigned_integer:
-            case lexer::token_type::value_signed_integer:
+            case lexer::token_type::value_unsigned:
+            case lexer::token_type::value_integer:
             case lexer::token_type::value_float:
                 {
                     m_lexer.get_number(result, last_token);

test/src/unit-class_lexer.cpp

@@ -65,30 +65,30 @@ TEST_CASE("lexer class")
     SECTION("numbers")
     {
         CHECK((json::lexer(reinterpret_cast<const json::lexer::lexer_char_t*>("0"),
-                           1).scan() == json::lexer::token_type::value_unsigned_integer));
+                           1).scan() == json::lexer::token_type::value_unsigned));
         CHECK((json::lexer(reinterpret_cast<const json::lexer::lexer_char_t*>("1"),
-                           1).scan() == json::lexer::token_type::value_unsigned_integer));
+                           1).scan() == json::lexer::token_type::value_unsigned));
         CHECK((json::lexer(reinterpret_cast<const json::lexer::lexer_char_t*>("2"),
-                           1).scan() == json::lexer::token_type::value_unsigned_integer));
+                           1).scan() == json::lexer::token_type::value_unsigned));
         CHECK((json::lexer(reinterpret_cast<const json::lexer::lexer_char_t*>("3"),
-                           1).scan() == json::lexer::token_type::value_unsigned_integer));
+                           1).scan() == json::lexer::token_type::value_unsigned));
         CHECK((json::lexer(reinterpret_cast<const json::lexer::lexer_char_t*>("4"),
-                           1).scan() == json::lexer::token_type::value_unsigned_integer));
+                           1).scan() == json::lexer::token_type::value_unsigned));
         CHECK((json::lexer(reinterpret_cast<const json::lexer::lexer_char_t*>("5"),
-                           1).scan() == json::lexer::token_type::value_unsigned_integer));
+                           1).scan() == json::lexer::token_type::value_unsigned));
         CHECK((json::lexer(reinterpret_cast<const json::lexer::lexer_char_t*>("6"),
-                           1).scan() == json::lexer::token_type::value_unsigned_integer));
+                           1).scan() == json::lexer::token_type::value_unsigned));
         CHECK((json::lexer(reinterpret_cast<const json::lexer::lexer_char_t*>("7"),
-                           1).scan() == json::lexer::token_type::value_unsigned_integer));
+                           1).scan() == json::lexer::token_type::value_unsigned));
         CHECK((json::lexer(reinterpret_cast<const json::lexer::lexer_char_t*>("8"),
-                           1).scan() == json::lexer::token_type::value_unsigned_integer));
+                           1).scan() == json::lexer::token_type::value_unsigned));
         CHECK((json::lexer(reinterpret_cast<const json::lexer::lexer_char_t*>("9"),
-                           1).scan() == json::lexer::token_type::value_unsigned_integer));
+                           1).scan() == json::lexer::token_type::value_unsigned));

         CHECK((json::lexer(reinterpret_cast<const json::lexer::lexer_char_t*>("-0"),
-                           2).scan() == json::lexer::token_type::value_signed_integer));
+                           2).scan() == json::lexer::token_type::value_integer));
         CHECK((json::lexer(reinterpret_cast<const json::lexer::lexer_char_t*>("-1"),
-                           2).scan() == json::lexer::token_type::value_signed_integer));
+                           2).scan() == json::lexer::token_type::value_integer));

         CHECK((json::lexer(reinterpret_cast<const json::lexer::lexer_char_t*>("1.1"),
                            3).scan() == json::lexer::token_type::value_float));

@@ -121,8 +121,8 @@ TEST_CASE("lexer class")
         CHECK((json::lexer::token_type_name(json::lexer::token_type::literal_false) == "false literal"));
         CHECK((json::lexer::token_type_name(json::lexer::token_type::literal_null) == "null literal"));
         CHECK((json::lexer::token_type_name(json::lexer::token_type::value_string) == "string literal"));
-        CHECK((json::lexer::token_type_name(json::lexer::token_type::value_unsigned_integer) == "number literal"));
-        CHECK((json::lexer::token_type_name(json::lexer::token_type::value_signed_integer) == "number literal"));
+        CHECK((json::lexer::token_type_name(json::lexer::token_type::value_unsigned) == "number literal"));
+        CHECK((json::lexer::token_type_name(json::lexer::token_type::value_integer) == "number literal"));
         CHECK((json::lexer::token_type_name(json::lexer::token_type::value_float) == "number literal"));
         CHECK((json::lexer::token_type_name(json::lexer::token_type::begin_array) == "'['"));
         CHECK((json::lexer::token_type_name(json::lexer::token_type::begin_object) == "'{'"));

test/src/unit-regression.cpp

@@ -417,7 +417,7 @@ TEST_CASE("regression tests")

         // disabled, because locale-specific beharivor is not
         // triggered in AppVeyor for some reason
-#if 0
+#ifndef _MSC_VER
         {
             // verify that strtod now uses commas as decimal-separator
             CHECK(std::strtod("3,14", nullptr) == 3.14);
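Note (not part of the diff): the block re-enabled above (now skipped only under MSVC) exercises locale-dependent number parsing. A minimal standalone illustration of the behaviour being checked; the de_DE.UTF-8 locale name is an assumption of this sketch and must be installed on the machine running it.

```cpp
#include <cassert>
#include <clocale>
#include <cstdlib>

int main()
{
    // in the default "C" locale the decimal separator is '.'
    assert(std::strtod("3.14", nullptr) == 3.14);

    // in a locale that uses ',' as decimal separator, strtod accepts "3,14"
    // (assumes de_DE.UTF-8 is available; the check is skipped otherwise)
    if (std::setlocale(LC_NUMERIC, "de_DE.UTF-8") != nullptr)
    {
        assert(std::strtod("3,14", nullptr) == 3.14);
    }
    return 0;
}
```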