🚨 fixed some warnings #821
This commit is contained in:
parent
ea5aed0769
commit
430f03512c
3 changed files with 14 additions and 13 deletions
21
src/json.hpp
21
src/json.hpp
|
@@ -533,6 +533,7 @@ enum class value_t : uint8_t
|
||||||
Returns an ordering that is similar to Python:
|
Returns an ordering that is similar to Python:
|
||||||
- order: null < boolean < number < object < array < string
|
- order: null < boolean < number < object < array < string
|
||||||
- furthermore, each type is not smaller than itself
|
- furthermore, each type is not smaller than itself
|
||||||
|
- discarded values are not comparable
|
||||||
|
|
||||||
@since version 1.0.0
|
@since version 1.0.0
|
||||||
*/
|
*/
|
||||||
|
@@ -550,9 +551,9 @@ inline bool operator<(const value_t lhs, const value_t rhs) noexcept
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// discarded values are not comparable
|
const auto l_index = static_cast<std::size_t>(lhs);
|
||||||
return lhs != value_t::discarded and rhs != value_t::discarded and
|
const auto r_index = static_cast<std::size_t>(rhs);
|
||||||
order[static_cast<std::size_t>(lhs)] < order[static_cast<std::size_t>(rhs)];
|
return (l_index < order.size() and r_index <= order.size() and order[l_index] < order[r_index]);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@@ -1741,7 +1742,7 @@ class lexer
|
||||||
{
|
{
|
||||||
const auto loc = localeconv();
|
const auto loc = localeconv();
|
||||||
assert(loc != nullptr);
|
assert(loc != nullptr);
|
||||||
return (loc->decimal_point == nullptr) ? '.' : loc->decimal_point[0];
|
return (loc->decimal_point == nullptr) ? '.' : *(loc->decimal_point);
|
||||||
}
|
}
|
||||||
|
|
||||||
/////////////////////
|
/////////////////////
|
||||||
|
@@ -1908,8 +1909,8 @@ class lexer
|
||||||
// unicode escapes
|
// unicode escapes
|
||||||
case 'u':
|
case 'u':
|
||||||
{
|
{
|
||||||
int codepoint;
|
|
||||||
const int codepoint1 = get_codepoint();
|
const int codepoint1 = get_codepoint();
|
||||||
|
int codepoint = codepoint1; // start with codepoint1
|
||||||
|
|
||||||
if (JSON_UNLIKELY(codepoint1 == -1))
|
if (JSON_UNLIKELY(codepoint1 == -1))
|
||||||
{
|
{
|
||||||
|
@@ -1934,6 +1935,7 @@ class lexer
|
||||||
// check if codepoint2 is a low surrogate
|
// check if codepoint2 is a low surrogate
|
||||||
if (JSON_LIKELY(0xDC00 <= codepoint2 and codepoint2 <= 0xDFFF))
|
if (JSON_LIKELY(0xDC00 <= codepoint2 and codepoint2 <= 0xDFFF))
|
||||||
{
|
{
|
||||||
|
// overwrite codepoint
|
||||||
codepoint =
|
codepoint =
|
||||||
// high surrogate occupies the most significant 22 bits
|
// high surrogate occupies the most significant 22 bits
|
||||||
(codepoint1 << 10)
|
(codepoint1 << 10)
|
||||||
|
@@ -1963,9 +1965,6 @@ class lexer
|
||||||
error_message = "invalid string: surrogate U+DC00..U+DFFF must follow U+D800..U+DBFF";
|
error_message = "invalid string: surrogate U+DC00..U+DFFF must follow U+D800..U+DBFF";
|
||||||
return token_type::parse_error;
|
return token_type::parse_error;
|
||||||
}
|
}
|
||||||
|
|
||||||
// only work with first code point
|
|
||||||
codepoint = codepoint1;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// result of the above calculation yields a proper codepoint
|
// result of the above calculation yields a proper codepoint
|
||||||
|
@@ -6090,8 +6089,8 @@ class serializer
|
||||||
*/
|
*/
|
||||||
serializer(output_adapter_t<char> s, const char ichar)
|
serializer(output_adapter_t<char> s, const char ichar)
|
||||||
: o(std::move(s)), loc(std::localeconv()),
|
: o(std::move(s)), loc(std::localeconv()),
|
||||||
thousands_sep(loc->thousands_sep == nullptr ? '\0' : loc->thousands_sep[0]),
|
thousands_sep(loc->thousands_sep == nullptr ? '\0' : * (loc->thousands_sep)),
|
||||||
decimal_point(loc->decimal_point == nullptr ? '\0' : loc->decimal_point[0]),
|
decimal_point(loc->decimal_point == nullptr ? '\0' : * (loc->decimal_point)),
|
||||||
indent_char(ichar), indent_string(512, indent_char) {}
|
indent_char(ichar), indent_string(512, indent_char) {}
|
||||||
|
|
||||||
// delete because of pointer members
|
// delete because of pointer members
|
||||||
|
@@ -8046,11 +8045,13 @@ class basic_json
|
||||||
|
|
||||||
case value_t::null:
|
case value_t::null:
|
||||||
{
|
{
|
||||||
|
object = nullptr; // silence warning, see #821
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
|
||||||
default:
|
default:
|
||||||
{
|
{
|
||||||
|
object = nullptr; // silence warning, see #821
|
||||||
if (JSON_UNLIKELY(t == value_t::null))
|
if (JSON_UNLIKELY(t == value_t::null))
|
||||||
{
|
{
|
||||||
JSON_THROW(other_error::create(500, "961c151d2e87f2686a955a9be24d316f1362bf21 2.1.1")); // LCOV_EXCL_LINE
|
JSON_THROW(other_error::create(500, "961c151d2e87f2686a955a9be24d316f1362bf21 2.1.1")); // LCOV_EXCL_LINE
|
||||||
|
|
|
@@ -1019,14 +1019,14 @@ TEST_CASE("value conversion")
|
||||||
|
|
||||||
SECTION("std::array is larger than JSON")
|
SECTION("std::array is larger than JSON")
|
||||||
{
|
{
|
||||||
std::array<int, 6> arr6 = {1, 2, 3, 4, 5, 6};
|
std::array<int, 6> arr6 = {{1, 2, 3, 4, 5, 6}};
|
||||||
CHECK_THROWS_AS(arr6 = j1, json::out_of_range);
|
CHECK_THROWS_AS(arr6 = j1, json::out_of_range);
|
||||||
CHECK_THROWS_WITH(arr6 = j1, "[json.exception.out_of_range.401] array index 4 is out of range");
|
CHECK_THROWS_WITH(arr6 = j1, "[json.exception.out_of_range.401] array index 4 is out of range");
|
||||||
}
|
}
|
||||||
|
|
||||||
SECTION("std::array is smaller than JSON")
|
SECTION("std::array is smaller than JSON")
|
||||||
{
|
{
|
||||||
std::array<int, 2> arr2 = {8, 9};
|
std::array<int, 2> arr2 = {{8, 9}};
|
||||||
arr2 = j1;
|
arr2 = j1;
|
||||||
CHECK(arr2[0] == 1);
|
CHECK(arr2[0] == 1);
|
||||||
CHECK(arr2[1] == 2);
|
CHECK(arr2[1] == 2);
|
||||||
|
|
|
@@ -1309,7 +1309,7 @@ TEST_CASE("regression tests")
|
||||||
SECTION("issue #843 - converting to array not working")
|
SECTION("issue #843 - converting to array not working")
|
||||||
{
|
{
|
||||||
json j;
|
json j;
|
||||||
std::array<int, 4> ar = {1, 1, 1, 1};
|
std::array<int, 4> ar = {{1, 1, 1, 1}};
|
||||||
j = ar;
|
j = ar;
|
||||||
ar = j;
|
ar = j;
|
||||||
}
|
}
|
||||||
|
|
Loading…
Reference in a new issue