🎨 replace alternative operators (and, not, or)

parent 4f04ea1bef
commit 0498202a03

22 changed files with 1300 additions and 1300 deletions
File diff suppressed because it is too large
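Context for the change: `and`, `or`, and `not` are standard C++ alternative tokens for `&&`, `||`, and `!`, so this commit only changes the spelling of the operators, not their behaviour. A minimal sketch (not taken from the repository) illustrating that the two spellings are interchangeable:

```cpp
#include <cassert>

int main()
{
    const bool a = true;
    const bool b = false;

    // Alternative tokens and primary operators evaluate identically
    // in conforming C++ compilers.
    assert((a and b) == (a && b));
    assert((a or b) == (a || b));
    assert((not a) == (!a));
    return 0;
}
```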
@@ -131,7 +131,7 @@ class input_buffer_adapter
 {
 if (JSON_HEDLEY_LIKELY(cursor < limit))
 {
-assert(cursor != nullptr and limit != nullptr);
+assert(cursor != nullptr && limit != nullptr);
 return std::char_traits<char>::to_int_type(*(cursor++));
 }

@@ -238,7 +238,7 @@ struct wide_string_input_helper<WideStringType, 2>
 utf8_bytes[1] = static_cast<std::char_traits<char>::int_type>(0x80u | (wc & 0x3Fu));
 utf8_bytes_filled = 2;
 }
-else if (0xD800 > wc or wc >= 0xE000)
+else if (0xD800 > wc || wc >= 0xE000)
 {
 utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(0xE0u | ((wc >> 12u)));
 utf8_bytes[1] = static_cast<std::char_traits<char>::int_type>(0x80u | ((wc >> 6u) & 0x3Fu));
@@ -331,24 +331,24 @@ inline input_stream_adapter input_adapter(std::istream&& stream)
 return input_stream_adapter(stream);
 }

-template<typename CharT, typename SizeT,
-typename std::enable_if<
-std::is_pointer<CharT>::value and
-std::is_integral<typename std::remove_pointer<CharT>::type>::value and
-not std::is_same<SizeT, bool>::value and
-sizeof(typename std::remove_pointer<CharT>::type) == 1,
-int>::type = 0>
+template < typename CharT, typename SizeT,
+typename std::enable_if <
+std::is_pointer<CharT>::value&&
+std::is_integral<typename std::remove_pointer<CharT>::type>::value&&
+!std::is_same<SizeT, bool>::value&&
+sizeof(typename std::remove_pointer<CharT>::type) == 1,
+int >::type = 0 >
 input_buffer_adapter input_adapter(CharT b, SizeT l)
 {
 return input_buffer_adapter(reinterpret_cast<const char*>(b), l);
 }

-template<typename CharT,
-typename std::enable_if<
-std::is_pointer<CharT>::value and
-std::is_integral<typename std::remove_pointer<CharT>::type>::value and
-sizeof(typename std::remove_pointer<CharT>::type) == 1,
-int>::type = 0>
+template < typename CharT,
+typename std::enable_if <
+std::is_pointer<CharT>::value&&
+std::is_integral<typename std::remove_pointer<CharT>::type>::value&&
+sizeof(typename std::remove_pointer<CharT>::type) == 1,
+int >::type = 0 >
 input_buffer_adapter input_adapter(CharT b)
 {
 return input_adapter(reinterpret_cast<const char*>(b),
@@ -408,10 +408,10 @@ inline wide_string_input_adapter<std::u32string> input_adapter(const std::u32str
 return wide_string_input_adapter<std::u32string>(ws);
 }

-template<class ContiguousContainer, typename
-std::enable_if<not std::is_pointer<ContiguousContainer>::value and
-std::is_base_of<std::random_access_iterator_tag, typename iterator_traits<decltype(std::begin(std::declval<ContiguousContainer const>()))>::iterator_category>::value,
-int>::type = 0>
+template < class ContiguousContainer, typename
+std::enable_if < !std::is_pointer<ContiguousContainer>::value&&
+std::is_base_of<std::random_access_iterator_tag, typename iterator_traits<decltype(std::begin(std::declval<ContiguousContainer const>()))>::iterator_category>::value,
+int >::type = 0 >
 input_buffer_adapter input_adapter(const ContiguousContainer& c)
 {
 return input_adapter(std::begin(c), std::end(c));
@@ -430,21 +430,21 @@ input_buffer_adapter input_adapter(T (&array)[N])
 class span_input_adapter
 {
 public:
-template<typename CharT,
-typename std::enable_if<
-std::is_pointer<CharT>::value and
-std::is_integral<typename std::remove_pointer<CharT>::type>::value and
-sizeof(typename std::remove_pointer<CharT>::type) == 1,
-int>::type = 0>
+template < typename CharT,
+typename std::enable_if <
+std::is_pointer<CharT>::value&&
+std::is_integral<typename std::remove_pointer<CharT>::type>::value&&
+sizeof(typename std::remove_pointer<CharT>::type) == 1,
+int >::type = 0 >
 span_input_adapter(CharT b, std::size_t l)
 : ia(reinterpret_cast<const char*>(b), l) {}

-template<typename CharT,
-typename std::enable_if<
-std::is_pointer<CharT>::value and
-std::is_integral<typename std::remove_pointer<CharT>::type>::value and
-sizeof(typename std::remove_pointer<CharT>::type) == 1,
-int>::type = 0>
+template < typename CharT,
+typename std::enable_if <
+std::is_pointer<CharT>::value&&
+std::is_integral<typename std::remove_pointer<CharT>::type>::value&&
+sizeof(typename std::remove_pointer<CharT>::type) == 1,
+int >::type = 0 >
 span_input_adapter(CharT b)
 : span_input_adapter(reinterpret_cast<const char*>(b),
 std::strlen(reinterpret_cast<const char*>(b))) {}
@@ -461,10 +461,10 @@ class span_input_adapter
 : span_input_adapter(std::begin(array), std::end(array)) {}

 /// input adapter for contiguous container
-template<class ContiguousContainer, typename
-std::enable_if<not std::is_pointer<ContiguousContainer>::value and
-std::is_base_of<std::random_access_iterator_tag, typename iterator_traits<decltype(std::begin(std::declval<ContiguousContainer const>()))>::iterator_category>::value,
-int>::type = 0>
+template < class ContiguousContainer, typename
+std::enable_if < !std::is_pointer<ContiguousContainer>::value&&
+std::is_base_of<std::random_access_iterator_tag, typename iterator_traits<decltype(std::begin(std::declval<ContiguousContainer const>()))>::iterator_category>::value,
+int >::type = 0 >
 span_input_adapter(const ContiguousContainer& c)
 : span_input_adapter(std::begin(c), std::end(c)) {}

@@ -218,7 +218,7 @@ class json_sax_dom_parser
 {
 ref_stack.push_back(handle_value(BasicJsonType::value_t::object));

-if (JSON_HEDLEY_UNLIKELY(len != std::size_t(-1) and len > ref_stack.back()->max_size()))
+if (JSON_HEDLEY_UNLIKELY(len != std::size_t(-1) && len > ref_stack.back()->max_size()))
 {
 JSON_THROW(out_of_range::create(408,
 "excessive object size: " + std::to_string(len)));
@@ -244,7 +244,7 @@ class json_sax_dom_parser
 {
 ref_stack.push_back(handle_value(BasicJsonType::value_t::array));

-if (JSON_HEDLEY_UNLIKELY(len != std::size_t(-1) and len > ref_stack.back()->max_size()))
+if (JSON_HEDLEY_UNLIKELY(len != std::size_t(-1) && len > ref_stack.back()->max_size()))
 {
 JSON_THROW(out_of_range::create(408,
 "excessive array size: " + std::to_string(len)));
@@ -309,7 +309,7 @@ class json_sax_dom_parser
 return &root;
 }

-assert(ref_stack.back()->is_array() or ref_stack.back()->is_object());
+assert(ref_stack.back()->is_array() || ref_stack.back()->is_object());

 if (ref_stack.back()->is_array())
 {
@@ -414,7 +414,7 @@ class json_sax_dom_callback_parser
 ref_stack.push_back(val.second);

 // check object limit
-if (ref_stack.back() and JSON_HEDLEY_UNLIKELY(len != std::size_t(-1) and len > ref_stack.back()->max_size()))
+if (ref_stack.back() && JSON_HEDLEY_UNLIKELY(len != std::size_t(-1) && len > ref_stack.back()->max_size()))
 {
 JSON_THROW(out_of_range::create(408, "excessive object size: " + std::to_string(len)));
 }
@@ -431,7 +431,7 @@ class json_sax_dom_callback_parser
 key_keep_stack.push_back(keep);

 // add discarded value at given key and store the reference for later
-if (keep and ref_stack.back())
+if (keep && ref_stack.back())
 {
 object_element = &(ref_stack.back()->m_value.object->operator[](val) = discarded);
 }
@@ -441,18 +441,18 @@ class json_sax_dom_callback_parser

 bool end_object()
 {
-if (ref_stack.back() and not callback(static_cast<int>(ref_stack.size()) - 1, parse_event_t::object_end, *ref_stack.back()))
+if (ref_stack.back() && !callback(static_cast<int>(ref_stack.size()) - 1, parse_event_t::object_end, *ref_stack.back()))
 {
 // discard object
 *ref_stack.back() = discarded;
 }

-assert(not ref_stack.empty());
-assert(not keep_stack.empty());
+assert(!ref_stack.empty());
+assert(!keep_stack.empty());
 ref_stack.pop_back();
 keep_stack.pop_back();

-if (not ref_stack.empty() and ref_stack.back() and ref_stack.back()->is_structured())
+if (!ref_stack.empty() && ref_stack.back() && ref_stack.back()->is_structured())
 {
 // remove discarded value
 for (auto it = ref_stack.back()->begin(); it != ref_stack.back()->end(); ++it)
@@ -477,7 +477,7 @@ class json_sax_dom_callback_parser
 ref_stack.push_back(val.second);

 // check array limit
-if (ref_stack.back() and JSON_HEDLEY_UNLIKELY(len != std::size_t(-1) and len > ref_stack.back()->max_size()))
+if (ref_stack.back() && JSON_HEDLEY_UNLIKELY(len != std::size_t(-1) && len > ref_stack.back()->max_size()))
 {
 JSON_THROW(out_of_range::create(408, "excessive array size: " + std::to_string(len)));
 }
@@ -492,20 +492,20 @@ class json_sax_dom_callback_parser
 if (ref_stack.back())
 {
 keep = callback(static_cast<int>(ref_stack.size()) - 1, parse_event_t::array_end, *ref_stack.back());
-if (not keep)
+if (!keep)
 {
 // discard array
 *ref_stack.back() = discarded;
 }
 }

-assert(not ref_stack.empty());
-assert(not keep_stack.empty());
+assert(!ref_stack.empty());
+assert(!keep_stack.empty());
 ref_stack.pop_back();
 keep_stack.pop_back();

 // remove discarded value
-if (not keep and not ref_stack.empty() and ref_stack.back()->is_array())
+if (!keep && !ref_stack.empty() && ref_stack.back()->is_array())
 {
 ref_stack.back()->m_value.array->pop_back();
 }
@@ -565,11 +565,11 @@ class json_sax_dom_callback_parser
 template<typename Value>
 std::pair<bool, BasicJsonType*> handle_value(Value&& v, const bool skip_callback = false)
 {
-assert(not keep_stack.empty());
+assert(!keep_stack.empty());

 // do not handle this value if we know it would be added to a discarded
 // container
-if (not keep_stack.back())
+if (!keep_stack.back())
 {
 return {false, nullptr};
 }
@@ -578,10 +578,10 @@ class json_sax_dom_callback_parser
 auto value = BasicJsonType(std::forward<Value>(v));

 // check callback
-const bool keep = skip_callback or callback(static_cast<int>(ref_stack.size()), parse_event_t::value, value);
+const bool keep = skip_callback || callback(static_cast<int>(ref_stack.size()), parse_event_t::value, value);

 // do not handle this value if we just learnt it shall be discarded
-if (not keep)
+if (!keep)
 {
 return {false, nullptr};
 }
@@ -594,13 +594,13 @@ class json_sax_dom_callback_parser

 // skip this value if we already decided to skip the parent
 // (https://github.com/nlohmann/json/issues/971#issuecomment-413678360)
-if (not ref_stack.back())
+if (!ref_stack.back())
 {
 return {false, nullptr};
 }

 // we now only expect arrays and objects
-assert(ref_stack.back()->is_array() or ref_stack.back()->is_object());
+assert(ref_stack.back()->is_array() || ref_stack.back()->is_object());

 // array
 if (ref_stack.back()->is_array())
@@ -612,11 +612,11 @@ class json_sax_dom_callback_parser
 // object
 assert(ref_stack.back()->is_object());
 // check if we should store an element for the current key
-assert(not key_keep_stack.empty());
+assert(!key_keep_stack.empty());
 const bool store_element = key_keep_stack.back();
 key_keep_stack.pop_back();

-if (not store_element)
+if (!store_element)
 {
 return {false, nullptr};
 }

@@ -164,15 +164,15 @@ class lexer : public lexer_base<BasicJsonType>
 {
 get();

-if (current >= '0' and current <= '9')
+if (current >= '0' && current <= '9')
 {
 codepoint += static_cast<int>((static_cast<unsigned int>(current) - 0x30u) << factor);
 }
-else if (current >= 'A' and current <= 'F')
+else if (current >= 'A' && current <= 'F')
 {
 codepoint += static_cast<int>((static_cast<unsigned int>(current) - 0x37u) << factor);
 }
-else if (current >= 'a' and current <= 'f')
+else if (current >= 'a' && current <= 'f')
 {
 codepoint += static_cast<int>((static_cast<unsigned int>(current) - 0x57u) << factor);
 }
@@ -182,7 +182,7 @@ class lexer : public lexer_base<BasicJsonType>
 }
 }

-assert(0x0000 <= codepoint and codepoint <= 0xFFFF);
+assert(0x0000 <= codepoint && codepoint <= 0xFFFF);
 return codepoint;
 }

@@ -203,13 +203,13 @@ class lexer : public lexer_base<BasicJsonType>
 */
 bool next_byte_in_range(std::initializer_list<int> ranges)
 {
-assert(ranges.size() == 2 or ranges.size() == 4 or ranges.size() == 6);
+assert(ranges.size() == 2 || ranges.size() == 4 || ranges.size() == 6);
 add(current);

 for (auto range = ranges.begin(); range != ranges.end(); ++range)
 {
 get();
-if (JSON_HEDLEY_LIKELY(*range <= current and current <= *(++range)))
+if (JSON_HEDLEY_LIKELY(*range <= current && current <= *(++range)))
 {
 add(current);
 }
@@ -315,10 +315,10 @@ class lexer : public lexer_base<BasicJsonType>
 }

 // check if code point is a high surrogate
-if (0xD800 <= codepoint1 and codepoint1 <= 0xDBFF)
+if (0xD800 <= codepoint1 && codepoint1 <= 0xDBFF)
 {
 // expect next \uxxxx entry
-if (JSON_HEDLEY_LIKELY(get() == '\\' and get() == 'u'))
+if (JSON_HEDLEY_LIKELY(get() == '\\' && get() == 'u'))
 {
 const int codepoint2 = get_codepoint();

@@ -329,7 +329,7 @@ class lexer : public lexer_base<BasicJsonType>
 }

 // check if codepoint2 is a low surrogate
-if (JSON_HEDLEY_LIKELY(0xDC00 <= codepoint2 and codepoint2 <= 0xDFFF))
+if (JSON_HEDLEY_LIKELY(0xDC00 <= codepoint2 && codepoint2 <= 0xDFFF))
 {
 // overwrite codepoint
 codepoint = static_cast<int>(
@@ -356,7 +356,7 @@ class lexer : public lexer_base<BasicJsonType>
 }
 else
 {
-if (JSON_HEDLEY_UNLIKELY(0xDC00 <= codepoint1 and codepoint1 <= 0xDFFF))
+if (JSON_HEDLEY_UNLIKELY(0xDC00 <= codepoint1 && codepoint1 <= 0xDFFF))
 {
 error_message = "invalid string: surrogate U+DC00..U+DFFF must follow U+D800..U+DBFF";
 return token_type::parse_error;
@@ -364,7 +364,7 @@ class lexer : public lexer_base<BasicJsonType>
 }

 // result of the above calculation yields a proper codepoint
-assert(0x00 <= codepoint and codepoint <= 0x10FFFF);
+assert(0x00 <= codepoint && codepoint <= 0x10FFFF);

 // translate codepoint into bytes
 if (codepoint < 0x80)
@@ -731,7 +731,7 @@ class lexer : public lexer_base<BasicJsonType>
 case 0xDE:
 case 0xDF:
 {
-if (JSON_HEDLEY_UNLIKELY(not next_byte_in_range({0x80, 0xBF})))
+if (JSON_HEDLEY_UNLIKELY(!next_byte_in_range({0x80, 0xBF})))
 {
 return token_type::parse_error;
 }
@@ -741,7 +741,7 @@ class lexer : public lexer_base<BasicJsonType>
 // U+0800..U+0FFF: bytes E0 A0..BF 80..BF
 case 0xE0:
 {
-if (JSON_HEDLEY_UNLIKELY(not (next_byte_in_range({0xA0, 0xBF, 0x80, 0xBF}))))
+if (JSON_HEDLEY_UNLIKELY(!(next_byte_in_range({0xA0, 0xBF, 0x80, 0xBF}))))
 {
 return token_type::parse_error;
 }
@@ -765,7 +765,7 @@ class lexer : public lexer_base<BasicJsonType>
 case 0xEE:
 case 0xEF:
 {
-if (JSON_HEDLEY_UNLIKELY(not (next_byte_in_range({0x80, 0xBF, 0x80, 0xBF}))))
+if (JSON_HEDLEY_UNLIKELY(!(next_byte_in_range({0x80, 0xBF, 0x80, 0xBF}))))
 {
 return token_type::parse_error;
 }
@@ -775,7 +775,7 @@ class lexer : public lexer_base<BasicJsonType>
 // U+D000..U+D7FF: bytes ED 80..9F 80..BF
 case 0xED:
 {
-if (JSON_HEDLEY_UNLIKELY(not (next_byte_in_range({0x80, 0x9F, 0x80, 0xBF}))))
+if (JSON_HEDLEY_UNLIKELY(!(next_byte_in_range({0x80, 0x9F, 0x80, 0xBF}))))
 {
 return token_type::parse_error;
 }
@@ -785,7 +785,7 @@ class lexer : public lexer_base<BasicJsonType>
 // U+10000..U+3FFFF F0 90..BF 80..BF 80..BF
 case 0xF0:
 {
-if (JSON_HEDLEY_UNLIKELY(not (next_byte_in_range({0x90, 0xBF, 0x80, 0xBF, 0x80, 0xBF}))))
+if (JSON_HEDLEY_UNLIKELY(!(next_byte_in_range({0x90, 0xBF, 0x80, 0xBF, 0x80, 0xBF}))))
 {
 return token_type::parse_error;
 }
@@ -797,7 +797,7 @@ class lexer : public lexer_base<BasicJsonType>
 case 0xF2:
 case 0xF3:
 {
-if (JSON_HEDLEY_UNLIKELY(not (next_byte_in_range({0x80, 0xBF, 0x80, 0xBF, 0x80, 0xBF}))))
+if (JSON_HEDLEY_UNLIKELY(!(next_byte_in_range({0x80, 0xBF, 0x80, 0xBF, 0x80, 0xBF}))))
 {
 return token_type::parse_error;
 }
@@ -807,7 +807,7 @@ class lexer : public lexer_base<BasicJsonType>
 // U+100000..U+10FFFF F4 80..8F 80..BF 80..BF
 case 0xF4:
 {
-if (JSON_HEDLEY_UNLIKELY(not (next_byte_in_range({0x80, 0x8F, 0x80, 0xBF, 0x80, 0xBF}))))
+if (JSON_HEDLEY_UNLIKELY(!(next_byte_in_range({0x80, 0x8F, 0x80, 0xBF, 0x80, 0xBF}))))
 {
 return token_type::parse_error;
 }
@@ -1308,7 +1308,7 @@ scan_number_done:

 if (JSON_HEDLEY_LIKELY(current != std::char_traits<char>::eof()))
 {
-assert(not token_string.empty());
+assert(!token_string.empty());
 token_string.pop_back();
 }
 }
@@ -1404,7 +1404,7 @@ scan_number_done:
 if (get() == 0xEF)
 {
 // check if we completely parse the BOM
-return get() == 0xBB and get() == 0xBF;
+return get() == 0xBB && get() == 0xBF;
 }

 // the first character is not the beginning of the BOM; unget it to
@@ -1416,7 +1416,7 @@ scan_number_done:
 token_type scan()
 {
 // initially, skip the BOM
-if (position.chars_read_total == 0 and not skip_bom())
+if (position.chars_read_total == 0 && !skip_bom())
 {
 error_message = "invalid BOM; must be 0xEF 0xBB 0xBF if given";
 return token_type::parse_error;
@@ -1427,7 +1427,7 @@ scan_number_done:
 {
 get();
 }
-while (current == ' ' or current == '\t' or current == '\n' or current == '\r');
+while (current == ' ' || current == '\t' || current == '\n' || current == '\r');

 switch (current)
 {

@@ -89,7 +89,7 @@ class parser
 result.assert_invariant();

 // in strict mode, input must be completely read
-if (strict and (get_token() != token_type::end_of_input))
+if (strict && (get_token() != token_type::end_of_input))
 {
 sdp.parse_error(m_lexer.get_position(),
 m_lexer.get_token_string(),
@@ -118,7 +118,7 @@ class parser
 result.assert_invariant();

 // in strict mode, input must be completely read
-if (strict and (get_token() != token_type::end_of_input))
+if (strict && (get_token() != token_type::end_of_input))
 {
 sdp.parse_error(m_lexer.get_position(),
 m_lexer.get_token_string(),
@@ -147,7 +147,7 @@ class parser
 return sax_parse(&sax_acceptor, strict);
 }

-template <typename SAX>
+template<typename SAX>
 JSON_HEDLEY_NON_NULL(2)
 bool sax_parse(SAX* sax, const bool strict = true)
 {
@@ -155,7 +155,7 @@ class parser
 const bool result = sax_parse_internal(sax);

 // strict mode: next byte must be EOF
-if (result and strict and (get_token() != token_type::end_of_input))
+if (result && strict && (get_token() != token_type::end_of_input))
 {
 return sax->parse_error(m_lexer.get_position(),
 m_lexer.get_token_string(),
@@ -167,7 +167,7 @@ class parser
 }

 private:
-template <typename SAX>
+template<typename SAX>
 JSON_HEDLEY_NON_NULL(2)
 bool sax_parse_internal(SAX* sax)
 {
@@ -179,14 +179,14 @@ class parser

 while (true)
 {
-if (not skip_to_state_evaluation)
+if (!skip_to_state_evaluation)
 {
 // invariant: get_token() was called before each iteration
 switch (last_token)
 {
 case token_type::begin_object:
 {
-if (JSON_HEDLEY_UNLIKELY(not sax->start_object(std::size_t(-1))))
+if (JSON_HEDLEY_UNLIKELY(!sax->start_object(std::size_t(-1))))
 {
 return false;
 }
@@ -194,7 +194,7 @@ class parser
 // closing } -> we are done
 if (get_token() == token_type::end_object)
 {
-if (JSON_HEDLEY_UNLIKELY(not sax->end_object()))
+if (JSON_HEDLEY_UNLIKELY(!sax->end_object()))
 {
 return false;
 }
@@ -209,7 +209,7 @@ class parser
 parse_error::create(101, m_lexer.get_position(),
 exception_message(token_type::value_string, "object key")));
 }
-if (JSON_HEDLEY_UNLIKELY(not sax->key(m_lexer.get_string())))
+if (JSON_HEDLEY_UNLIKELY(!sax->key(m_lexer.get_string())))
 {
 return false;
 }
@@ -233,7 +233,7 @@ class parser

 case token_type::begin_array:
 {
-if (JSON_HEDLEY_UNLIKELY(not sax->start_array(std::size_t(-1))))
+if (JSON_HEDLEY_UNLIKELY(!sax->start_array(std::size_t(-1))))
 {
 return false;
 }
@@ -241,7 +241,7 @@ class parser
 // closing ] -> we are done
 if (get_token() == token_type::end_array)
 {
-if (JSON_HEDLEY_UNLIKELY(not sax->end_array()))
+if (JSON_HEDLEY_UNLIKELY(!sax->end_array()))
 {
 return false;
 }
@@ -259,14 +259,14 @@ class parser
 {
 const auto res = m_lexer.get_number_float();

-if (JSON_HEDLEY_UNLIKELY(not std::isfinite(res)))
+if (JSON_HEDLEY_UNLIKELY(!std::isfinite(res)))
 {
 return sax->parse_error(m_lexer.get_position(),
 m_lexer.get_token_string(),
 out_of_range::create(406, "number overflow parsing '" + m_lexer.get_token_string() + "'"));
 }

-if (JSON_HEDLEY_UNLIKELY(not sax->number_float(res, m_lexer.get_string())))
+if (JSON_HEDLEY_UNLIKELY(!sax->number_float(res, m_lexer.get_string())))
 {
 return false;
 }
@@ -276,7 +276,7 @@ class parser

 case token_type::literal_false:
 {
-if (JSON_HEDLEY_UNLIKELY(not sax->boolean(false)))
+if (JSON_HEDLEY_UNLIKELY(!sax->boolean(false)))
 {
 return false;
 }
@@ -285,7 +285,7 @@ class parser

 case token_type::literal_null:
 {
-if (JSON_HEDLEY_UNLIKELY(not sax->null()))
+if (JSON_HEDLEY_UNLIKELY(!sax->null()))
 {
 return false;
 }
@@ -294,7 +294,7 @@ class parser

 case token_type::literal_true:
 {
-if (JSON_HEDLEY_UNLIKELY(not sax->boolean(true)))
+if (JSON_HEDLEY_UNLIKELY(!sax->boolean(true)))
 {
 return false;
 }
@@ -303,7 +303,7 @@ class parser

 case token_type::value_integer:
 {
-if (JSON_HEDLEY_UNLIKELY(not sax->number_integer(m_lexer.get_number_integer())))
+if (JSON_HEDLEY_UNLIKELY(!sax->number_integer(m_lexer.get_number_integer())))
 {
 return false;
 }
@@ -312,7 +312,7 @@ class parser

 case token_type::value_string:
 {
-if (JSON_HEDLEY_UNLIKELY(not sax->string(m_lexer.get_string())))
+if (JSON_HEDLEY_UNLIKELY(!sax->string(m_lexer.get_string())))
 {
 return false;
 }
@@ -321,7 +321,7 @@ class parser

 case token_type::value_unsigned:
 {
-if (JSON_HEDLEY_UNLIKELY(not sax->number_unsigned(m_lexer.get_number_unsigned())))
+if (JSON_HEDLEY_UNLIKELY(!sax->number_unsigned(m_lexer.get_number_unsigned())))
 {
 return false;
 }
@@ -371,7 +371,7 @@ class parser
 // closing ]
 if (JSON_HEDLEY_LIKELY(last_token == token_type::end_array))
 {
-if (JSON_HEDLEY_UNLIKELY(not sax->end_array()))
+if (JSON_HEDLEY_UNLIKELY(!sax->end_array()))
 {
 return false;
 }
@@ -380,7 +380,7 @@ class parser
 // new value, we need to evaluate the new state first.
 // By setting skip_to_state_evaluation to false, we
 // are effectively jumping to the beginning of this if.
-assert(not states.empty());
+assert(!states.empty());
 states.pop_back();
 skip_to_state_evaluation = true;
 continue;
@@ -405,7 +405,7 @@ class parser
 exception_message(token_type::value_string, "object key")));
 }

-if (JSON_HEDLEY_UNLIKELY(not sax->key(m_lexer.get_string())))
+if (JSON_HEDLEY_UNLIKELY(!sax->key(m_lexer.get_string())))
 {
 return false;
 }
@@ -427,7 +427,7 @@ class parser
 // closing }
 if (JSON_HEDLEY_LIKELY(last_token == token_type::end_object))
 {
-if (JSON_HEDLEY_UNLIKELY(not sax->end_object()))
+if (JSON_HEDLEY_UNLIKELY(!sax->end_object()))
 {
 return false;
 }
@@ -436,7 +436,7 @@ class parser
 // new value, we need to evaluate the new state first.
 // By setting skip_to_state_evaluation to false, we
 // are effectively jumping to the beginning of this if.
-assert(not states.empty());
+assert(!states.empty());
 states.pop_back();
 skip_to_state_evaluation = true;
 continue;
@@ -460,7 +460,7 @@ class parser
 {
 std::string error_msg = "syntax error ";

-if (not context.empty())
+if (!context.empty())
 {
 error_msg += "while parsing " + context + " ";
 }