fix cmake install directory (for real this time)

* Rename 'develop' folder to 'include/nlohmann'
* Rename 'src' folder to 'single_include/nlohmann'
* Use <nlohmann/*> headers in sources and tests
* Change amalgamate config file
Théo DELRIEU 2018-01-29 11:21:11 +01:00
parent 9958dde3da
commit 14cd019861
No known key found for this signature in database
GPG key ID: 7D6E00D1DF01DEAF
73 changed files with 226 additions and 224 deletions
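For orientation before the diffs: the bullets above mean that the library headers now live under include/nlohmann and are referenced through the installed include prefix. A minimal before/after sketch of that include-style change follows; the old quoted spelling is an assumption based on the previous 'develop' folder layout and is not taken from this diff, while the new angle-bracket form matches the headers shown below.

// before this commit (assumed): headers included relative to the 'develop' folder
// #include "detail/input/input_adapters.hpp"

// after this commit: headers installed under <prefix>/include/nlohmann
#include <nlohmann/detail/input/input_adapters.hpp>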

File diff suppressed because it is too large

include/nlohmann/detail/input/input_adapters.hpp (new file)

@@ -0,0 +1,262 @@
#pragma once
#include <algorithm> // min
#include <array> // array
#include <cassert> // assert
#include <cstddef> // size_t
#include <cstring> // strlen
#include <ios> // streamsize, streamoff, streampos
#include <istream> // istream
#include <iterator> // begin, end, iterator_traits, random_access_iterator_tag, distance, next
#include <memory> // shared_ptr, make_shared, addressof
#include <numeric> // accumulate
#include <string> // string, char_traits
#include <type_traits> // enable_if, is_base_of, is_pointer, is_integral, remove_pointer
#include <utility> // pair, declval
#include <nlohmann/detail/macro_scope.hpp>
namespace nlohmann
{
namespace detail
{
////////////////////
// input adapters //
////////////////////
/*!
@brief abstract input adapter interface
Produces a stream of std::char_traits<char>::int_type characters from a
std::istream, a buffer, or some other input type. Supports returning (ungetting)
exactly one non-EOF character for future input. The int_type characters returned
consist of all valid char values as positive values (typically unsigned char),
plus an EOF value outside that range, specified by the value of the function
std::char_traits<char>::eof(). This value is typically -1, but could be any
arbitrary value which is not a valid char value.
*/
struct input_adapter_protocol
{
/// get a character [0,255] or std::char_traits<char>::eof().
virtual std::char_traits<char>::int_type get_character() = 0;
/// restore the last non-eof() character to input
virtual void unget_character() = 0;
virtual ~input_adapter_protocol() = default;
};
/// a type to simplify interfaces
using input_adapter_t = std::shared_ptr<input_adapter_protocol>;
/*!
Input adapter for a (caching) istream. Ignores a UTF-8 byte order mark at the
beginning of input. Does not support changing the underlying std::streambuf
in mid-input. Maintains underlying std::istream and std::streambuf to support
subsequent use of standard std::istream operations to process any input
characters following those used in parsing the JSON input. Clears the
std::istream flags; any input errors (e.g., EOF) will be detected by the first
subsequent call for input from the std::istream.
*/
class input_stream_adapter : public input_adapter_protocol
{
public:
~input_stream_adapter() override
{
// clear stream flags; we use underlying streambuf I/O, do not
// maintain ifstream flags
is.clear();
}
explicit input_stream_adapter(std::istream& i)
: is(i), sb(*i.rdbuf())
{
// skip byte order mark
std::char_traits<char>::int_type c;
if ((c = get_character()) == 0xEF)
{
if ((c = get_character()) == 0xBB)
{
if ((c = get_character()) == 0xBF)
{
return; // Ignore BOM
}
else if (c != std::char_traits<char>::eof())
{
is.unget();
}
is.putback('\xBB');
}
else if (c != std::char_traits<char>::eof())
{
is.unget();
}
is.putback('\xEF');
}
else if (c != std::char_traits<char>::eof())
{
is.unget(); // no byte order mark; process as usual
}
}
// delete because of reference members
input_stream_adapter(const input_stream_adapter&) = delete;
input_stream_adapter& operator=(input_stream_adapter&) = delete;
// std::istream/std::streambuf use std::char_traits<char>::to_int_type, to
// ensure that std::char_traits<char>::eof() and the character 0xFF do not
// end up as the same value, e.g. 0xFFFFFFFF.
std::char_traits<char>::int_type get_character() override
{
return sb.sbumpc();
}
void unget_character() override
{
sb.sungetc(); // is.unget() avoided for performance
}
private:
/// the associated input stream
std::istream& is;
std::streambuf& sb;
};
/// input adapter for buffer input
class input_buffer_adapter : public input_adapter_protocol
{
public:
input_buffer_adapter(const char* b, const std::size_t l)
: cursor(b), limit(b + l), start(b)
{
// skip byte order mark
if (l >= 3 and b[0] == '\xEF' and b[1] == '\xBB' and b[2] == '\xBF')
{
cursor += 3;
}
}
// delete because of pointer members
input_buffer_adapter(const input_buffer_adapter&) = delete;
input_buffer_adapter& operator=(input_buffer_adapter&) = delete;
std::char_traits<char>::int_type get_character() noexcept override
{
if (JSON_LIKELY(cursor < limit))
{
return std::char_traits<char>::to_int_type(*(cursor++));
}
return std::char_traits<char>::eof();
}
void unget_character() noexcept override
{
if (JSON_LIKELY(cursor > start))
{
--cursor;
}
}
private:
/// pointer to the current character
const char* cursor;
/// pointer past the last character
const char* limit;
/// pointer to the first character
const char* start;
};
class input_adapter
{
public:
// native support
/// input adapter for input stream
input_adapter(std::istream& i)
: ia(std::make_shared<input_stream_adapter>(i)) {}
/// input adapter for input stream
input_adapter(std::istream&& i)
: ia(std::make_shared<input_stream_adapter>(i)) {}
/// input adapter for buffer
template<typename CharT,
typename std::enable_if<
std::is_pointer<CharT>::value and
std::is_integral<typename std::remove_pointer<CharT>::type>::value and
sizeof(typename std::remove_pointer<CharT>::type) == 1,
int>::type = 0>
input_adapter(CharT b, std::size_t l)
: ia(std::make_shared<input_buffer_adapter>(reinterpret_cast<const char*>(b), l)) {}
// derived support
/// input adapter for string literal
template<typename CharT,
typename std::enable_if<
std::is_pointer<CharT>::value and
std::is_integral<typename std::remove_pointer<CharT>::type>::value and
sizeof(typename std::remove_pointer<CharT>::type) == 1,
int>::type = 0>
input_adapter(CharT b)
: input_adapter(reinterpret_cast<const char*>(b),
std::strlen(reinterpret_cast<const char*>(b))) {}
/// input adapter for iterator range with contiguous storage
template<class IteratorType,
typename std::enable_if<
std::is_same<typename std::iterator_traits<IteratorType>::iterator_category, std::random_access_iterator_tag>::value,
int>::type = 0>
input_adapter(IteratorType first, IteratorType last)
{
// assertion to check that the iterator range is indeed contiguous,
// see http://stackoverflow.com/a/35008842/266378 for more discussion
assert(std::accumulate(
first, last, std::pair<bool, int>(true, 0),
[&first](std::pair<bool, int> res, decltype(*first) val)
{
res.first &= (val == *(std::next(std::addressof(*first), res.second++)));
return res;
}).first);
// assertion to check that each element is 1 byte long
static_assert(
sizeof(typename std::iterator_traits<IteratorType>::value_type) == 1,
"each element in the iterator range must have the size of 1 byte");
const auto len = static_cast<size_t>(std::distance(first, last));
if (JSON_LIKELY(len > 0))
{
// there is at least one element: use the address of first
ia = std::make_shared<input_buffer_adapter>(reinterpret_cast<const char*>(&(*first)), len);
}
else
{
// the address of first cannot be used: use nullptr
ia = std::make_shared<input_buffer_adapter>(nullptr, len);
}
}
/// input adapter for array
template<class T, std::size_t N>
input_adapter(T (&array)[N])
: input_adapter(std::begin(array), std::end(array)) {}
/// input adapter for contiguous container
template<class ContiguousContainer, typename
std::enable_if<not std::is_pointer<ContiguousContainer>::value and
std::is_base_of<std::random_access_iterator_tag, typename std::iterator_traits<decltype(std::begin(std::declval<ContiguousContainer const>()))>::iterator_category>::value,
int>::type = 0>
input_adapter(const ContiguousContainer& c)
: input_adapter(std::begin(c), std::end(c)) {}
operator input_adapter_t()
{
return ia;
}
private:
/// the actual adapter
input_adapter_t ia = nullptr;
};
}
}
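The header above defines the adapter layer only; as a rough usage sketch (illustrative, not part of this commit), input_adapter can be built from a stream, a NUL-terminated string, or a contiguous container, and converts implicitly to input_adapter_t. A UTF-8 BOM at the start of stream input is skipped by the constructor. The sketch pulls in <nlohmann/json.hpp> from the post-commit layout so the detail header is available with all of its dependencies.

#include <iostream>
#include <sstream>
#include <string>
#include <vector>
#include <nlohmann/json.hpp>  // brings in nlohmann/detail/input/input_adapters.hpp

int main()
{
    using nlohmann::detail::input_adapter;
    using nlohmann::detail::input_adapter_t;

    // stream input: the constructor consumes a leading UTF-8 BOM, if present
    std::istringstream is("\xEF\xBB\xBF{\"key\": 42}");
    input_adapter_t a1 = input_adapter(is);

    // NUL-terminated C string: the length is taken via std::strlen
    const char* text = "[1,2,3]";
    input_adapter_t a2 = input_adapter(text);

    // contiguous container: the element addresses are used as the buffer
    std::vector<char> buf {'t', 'r', 'u', 'e'};
    input_adapter_t a3 = input_adapter(buf);

    // drain a2 character by character until eof()
    for (auto c = a2->get_character();
         c != std::char_traits<char>::eof();
         c = a2->get_character())
    {
        std::cout << static_cast<char>(c);
    }
    std::cout << '\n';  // prints [1,2,3]
}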

File diff suppressed because it is too large

include/nlohmann/detail/input/parser.hpp (new file)

@@ -0,0 +1,589 @@
#pragma once
#include <cassert> // assert
#include <cmath> // isfinite
#include <cstdint> // uint8_t
#include <functional> // function
#include <string> // string
#include <utility> // move
#include <nlohmann/detail/exceptions.hpp>
#include <nlohmann/detail/macro_scope.hpp>
#include <nlohmann/detail/input/input_adapters.hpp>
#include <nlohmann/detail/input/lexer.hpp>
#include <nlohmann/detail/value_t.hpp>
namespace nlohmann
{
namespace detail
{
////////////
// parser //
////////////
/*!
@brief syntax analysis
This class implements a recursive descent parser.
*/
template<typename BasicJsonType>
class parser
{
using number_integer_t = typename BasicJsonType::number_integer_t;
using number_unsigned_t = typename BasicJsonType::number_unsigned_t;
using number_float_t = typename BasicJsonType::number_float_t;
using lexer_t = lexer<BasicJsonType>;
using token_type = typename lexer_t::token_type;
public:
enum class parse_event_t : uint8_t
{
/// the parser read `{` and started to process a JSON object
object_start,
/// the parser read `}` and finished processing a JSON object
object_end,
/// the parser read `[` and started to process a JSON array
array_start,
/// the parser read `]` and finished processing a JSON array
array_end,
/// the parser read a key of a value in an object
key,
/// the parser finished reading a JSON value
value
};
using parser_callback_t =
std::function<bool(int depth, parse_event_t event, BasicJsonType& parsed)>;
/// a parser reading from an input adapter
explicit parser(detail::input_adapter_t adapter,
const parser_callback_t cb = nullptr,
const bool allow_exceptions_ = true)
: callback(cb), m_lexer(adapter), allow_exceptions(allow_exceptions_)
{}
/*!
@brief public parser interface
@param[in] strict whether to expect the last token to be EOF
@param[in,out] result parsed JSON value
@throw parse_error.101 in case of an unexpected token
@throw parse_error.102 if to_unicode fails or surrogate error
@throw parse_error.103 if to_unicode fails
*/
void parse(const bool strict, BasicJsonType& result)
{
// read first token
get_token();
parse_internal(true, result);
result.assert_invariant();
// in strict mode, input must be completely read
if (strict)
{
get_token();
expect(token_type::end_of_input);
}
// in case of an error, return discarded value
if (errored)
{
result = value_t::discarded;
return;
}
// set top-level value to null if it was discarded by the callback
// function
if (result.is_discarded())
{
result = nullptr;
}
}
/*!
@brief public accept interface
@param[in] strict whether to expect the last token to be EOF
@return whether the input is a proper JSON text
*/
bool accept(const bool strict = true)
{
// read first token
get_token();
if (not accept_internal())
{
return false;
}
// strict => last token must be EOF
return not strict or (get_token() == token_type::end_of_input);
}
private:
/*!
@brief the actual parser
@throw parse_error.101 in case of an unexpected token
@throw parse_error.102 if to_unicode fails or surrogate error
@throw parse_error.103 if to_unicode fails
*/
void parse_internal(bool keep, BasicJsonType& result)
{
// never parse after a parse error was detected
assert(not errored);
// start with a discarded value
if (not result.is_discarded())
{
result.m_value.destroy(result.m_type);
result.m_type = value_t::discarded;
}
switch (last_token)
{
case token_type::begin_object:
{
if (keep)
{
if (callback)
{
keep = callback(depth++, parse_event_t::object_start, result);
}
if (not callback or keep)
{
// explicitly set result to object to cope with {}
result.m_type = value_t::object;
result.m_value = value_t::object;
}
}
// read next token
get_token();
// closing } -> we are done
if (last_token == token_type::end_object)
{
if (keep and callback and not callback(--depth, parse_event_t::object_end, result))
{
result.m_value.destroy(result.m_type);
result.m_type = value_t::discarded;
}
break;
}
// parse values
std::string key;
BasicJsonType value;
while (true)
{
// store key
if (not expect(token_type::value_string))
{
return;
}
key = m_lexer.move_string();
bool keep_tag = false;
if (keep)
{
if (callback)
{
BasicJsonType k(key);
keep_tag = callback(depth, parse_event_t::key, k);
}
else
{
keep_tag = true;
}
}
// parse separator (:)
get_token();
if (not expect(token_type::name_separator))
{
return;
}
// parse and add value
get_token();
value.m_value.destroy(value.m_type);
value.m_type = value_t::discarded;
parse_internal(keep, value);
if (JSON_UNLIKELY(errored))
{
return;
}
if (keep and keep_tag and not value.is_discarded())
{
result.m_value.object->emplace(std::move(key), std::move(value));
}
// comma -> next value
get_token();
if (last_token == token_type::value_separator)
{
get_token();
continue;
}
// closing }
if (not expect(token_type::end_object))
{
return;
}
break;
}
if (keep and callback and not callback(--depth, parse_event_t::object_end, result))
{
result.m_value.destroy(result.m_type);
result.m_type = value_t::discarded;
}
break;
}
case token_type::begin_array:
{
if (keep)
{
if (callback)
{
keep = callback(depth++, parse_event_t::array_start, result);
}
if (not callback or keep)
{
// explicitly set result to array to cope with []
result.m_type = value_t::array;
result.m_value = value_t::array;
}
}
// read next token
get_token();
// closing ] -> we are done
if (last_token == token_type::end_array)
{
if (callback and not callback(--depth, parse_event_t::array_end, result))
{
result.m_value.destroy(result.m_type);
result.m_type = value_t::discarded;
}
break;
}
// parse values
BasicJsonType value;
while (true)
{
// parse value
value.m_value.destroy(value.m_type);
value.m_type = value_t::discarded;
parse_internal(keep, value);
if (JSON_UNLIKELY(errored))
{
return;
}
if (keep and not value.is_discarded())
{
result.m_value.array->push_back(std::move(value));
}
// comma -> next value
get_token();
if (last_token == token_type::value_separator)
{
get_token();
continue;
}
// closing ]
if (not expect(token_type::end_array))
{
return;
}
break;
}
if (keep and callback and not callback(--depth, parse_event_t::array_end, result))
{
result.m_value.destroy(result.m_type);
result.m_type = value_t::discarded;
}
break;
}
case token_type::literal_null:
{
result.m_type = value_t::null;
break;
}
case token_type::value_string:
{
result.m_type = value_t::string;
result.m_value = m_lexer.move_string();
break;
}
case token_type::literal_true:
{
result.m_type = value_t::boolean;
result.m_value = true;
break;
}
case token_type::literal_false:
{
result.m_type = value_t::boolean;
result.m_value = false;
break;
}
case token_type::value_unsigned:
{
result.m_type = value_t::number_unsigned;
result.m_value = m_lexer.get_number_unsigned();
break;
}
case token_type::value_integer:
{
result.m_type = value_t::number_integer;
result.m_value = m_lexer.get_number_integer();
break;
}
case token_type::value_float:
{
result.m_type = value_t::number_float;
result.m_value = m_lexer.get_number_float();
// throw in case of infinity or NAN
if (JSON_UNLIKELY(not std::isfinite(result.m_value.number_float)))
{
if (allow_exceptions)
{
JSON_THROW(out_of_range::create(406, "number overflow parsing '" +
m_lexer.get_token_string() + "'"));
}
expect(token_type::uninitialized);
}
break;
}
case token_type::parse_error:
{
// using "uninitialized" to avoid "expected" message
if (not expect(token_type::uninitialized))
{
return;
}
break; // LCOV_EXCL_LINE
}
default:
{
// the last token was unexpected; we expected a value
if (not expect(token_type::literal_or_value))
{
return;
}
break; // LCOV_EXCL_LINE
}
}
if (keep and callback and not callback(depth, parse_event_t::value, result))
{
result.m_type = value_t::discarded;
}
}
/*!
@brief the actual acceptor
@invariant 1. The last token is not yet processed. Therefore, the caller
of this function must make sure a token has been read.
2. When this function returns, the last token is processed.
That is, the last read character was already considered.
This invariant makes sure that no token needs to be "unput".
*/
bool accept_internal()
{
switch (last_token)
{
case token_type::begin_object:
{
// read next token
get_token();
// closing } -> we are done
if (last_token == token_type::end_object)
{
return true;
}
// parse values
while (true)
{
// parse key
if (last_token != token_type::value_string)
{
return false;
}
// parse separator (:)
get_token();
if (last_token != token_type::name_separator)
{
return false;
}
// parse value
get_token();
if (not accept_internal())
{
return false;
}
// comma -> next value
get_token();
if (last_token == token_type::value_separator)
{
get_token();
continue;
}
// closing }
return (last_token == token_type::end_object);
}
}
case token_type::begin_array:
{
// read next token
get_token();
// closing ] -> we are done
if (last_token == token_type::end_array)
{
return true;
}
// parse values
while (true)
{
// parse value
if (not accept_internal())
{
return false;
}
// comma -> next value
get_token();
if (last_token == token_type::value_separator)
{
get_token();
continue;
}
// closing ]
return (last_token == token_type::end_array);
}
}
case token_type::value_float:
{
// reject infinity or NAN
return std::isfinite(m_lexer.get_number_float());
}
case token_type::literal_false:
case token_type::literal_null:
case token_type::literal_true:
case token_type::value_integer:
case token_type::value_string:
case token_type::value_unsigned:
return true;
default: // the last token was unexpected
return false;
}
}
/// get next token from lexer
token_type get_token()
{
return (last_token = m_lexer.scan());
}
/*!
@throw parse_error.101 if expected token did not occur
*/
bool expect(token_type t)
{
if (JSON_UNLIKELY(t != last_token))
{
errored = true;
expected = t;
if (allow_exceptions)
{
throw_exception();
}
else
{
return false;
}
}
return true;
}
[[noreturn]] void throw_exception() const
{
std::string error_msg = "syntax error - ";
if (last_token == token_type::parse_error)
{
error_msg += std::string(m_lexer.get_error_message()) + "; last read: '" +
m_lexer.get_token_string() + "'";
}
else
{
error_msg += "unexpected " + std::string(lexer_t::token_type_name(last_token));
}
if (expected != token_type::uninitialized)
{
error_msg += "; expected " + std::string(lexer_t::token_type_name(expected));
}
JSON_THROW(parse_error::create(101, m_lexer.get_position(), error_msg));
}
private:
/// current level of recursion
int depth = 0;
/// callback function
const parser_callback_t callback = nullptr;
/// the type of the last read token
token_type last_token = token_type::uninitialized;
/// the lexer
lexer_t m_lexer;
/// whether a syntax error occurred
bool errored = false;
/// possible reason for the syntax error
token_type expected = token_type::uninitialized;
/// whether to throw exceptions in case of errors
const bool allow_exceptions = true;
};
}
}
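A short usage sketch of the parser above (illustrative, not part of this commit): accept() performs a pure syntax check without building a value, while parse() builds a DOM and can filter it through a parser_callback_t. The sketch instantiates detail::parser directly with nlohmann::json as BasicJsonType; in ordinary use this happens behind json::parse and json::accept.

#include <iostream>
#include <nlohmann/json.hpp>

int main()
{
    using nlohmann::json;
    using parser_t = nlohmann::detail::parser<json>;
    namespace detail = nlohmann::detail;

    // syntax check only: no value is built and no exception is thrown
    std::cout << std::boolalpha
              << parser_t(detail::input_adapter("{\"valid\": true}")).accept() << ' '  // true
              << parser_t(detail::input_adapter("{\"valid\": ")).accept() << '\n';     // false

    // parse() with a callback that drops every member whose key is "debug"
    auto cb = [](int /*depth*/, parser_t::parse_event_t event, json& parsed)
    {
        return not (event == parser_t::parse_event_t::key and parsed == json("debug"));
    };

    json result;
    parser_t(detail::input_adapter("{\"a\": 1, \"debug\": 2}"), cb).parse(true, result);
    std::cout << result.dump() << '\n';  // prints {"a":1}
}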