Merge branch 'release/3.7.0'
commit ea60d40f4a
102 changed files with 12628 additions and 13219 deletions
.circleci/config.yml (new file, 27 lines)

@@ -0,0 +1,27 @@
version: 2
jobs:
  build:
    docker:
      - image: debian:stretch

    steps:
      - checkout

      - run:
          name: Install sudo
          command: 'apt-get update && apt-get install -y sudo && rm -rf /var/lib/apt/lists/*'
      - run:
          name: Install GCC
          command: 'apt-get update && apt-get install -y gcc g++'
      - run:
          name: Install CMake
          command: 'apt-get update && sudo apt-get install -y cmake'
      - run:
          name: Create build files
          command: 'mkdir build ; cd build ; cmake ..'
      - run:
          name: Compile
          command: 'cmake --build build'
      - run:
          name: Execute test suite
          command: 'cd build ; ctest -j 2'
.doozer.json (new file, 82 lines)

@@ -0,0 +1,82 @@
{
  "targets": {
    "raspbian-jessie": {
      "buildenv": "raspbian-jessie",
      "builddeps": ["build-essential", "wget"],
      "buildcmd": [
        "uname -a",
        "cat /etc/os-release",
        "g++ --version",
        "cd",
        "wget https://github.com/Kitware/CMake/releases/download/v3.14.0/cmake-3.14.0.tar.gz",
        "tar xfz cmake-3.14.0.tar.gz",
        "cd cmake-3.14.0",
        "./bootstrap",
        "make -j8",
        "cd",
        "mkdir build",
        "cd build",
        "../cmake-3.14.0/bin/cmake /project/repo/checkout",
        "make -j8",
        "../cmake-3.14.0/bin/ctest -VV -j4 --timeout 10000"
      ]
    },
    "xenial-armhf": {
      "buildenv": "xenial-armhf",
      "builddeps": ["build-essential", "wget"],
      "buildcmd": [
        "uname -a",
        "lsb_release -a",
        "g++ --version",
        "cd",
        "wget --no-check-certificate https://github.com/Kitware/CMake/releases/download/v3.14.0/cmake-3.14.0.tar.gz",
        "tar xfz cmake-3.14.0.tar.gz",
        "cd cmake-3.14.0",
        "./bootstrap",
        "make -j8",
        "cd",
        "mkdir build",
        "cd build",
        "../cmake-3.14.0/bin/cmake /project/repo/checkout",
        "make -j8",
        "../cmake-3.14.0/bin/ctest -VV -j4 --timeout 10000"
      ]
    },
    "fedora24-x86_64": {
      "buildenv": "fedora24-x86_64",
      "builddeps": ["cmake", "make", "gcc gcc-c++"],
      "buildcmd": [
        "uname -a",
        "cat /etc/fedora-release",
        "g++ --version",
        "cd",
        "mkdir build",
        "cd build",
        "cmake /project/repo/checkout",
        "make -j8",
        "ctest -VV -j8"
      ]
    },
    "centos7-x86_64": {
      "buildenv": "centos7-x86_64",
      "builddeps": ["make", "wget", "gcc-c++"],
      "buildcmd": [
        "uname -a",
        "rpm -q centos-release",
        "g++ --version",
        "cd",
        "wget https://github.com/Kitware/CMake/releases/download/v3.14.0/cmake-3.14.0.tar.gz",
        "tar xfz cmake-3.14.0.tar.gz",
        "cd cmake-3.14.0",
        "./bootstrap",
        "make -j8",
        "cd",
        "mkdir build",
        "cd build",
        "../cmake-3.14.0/bin/cmake /project/repo/checkout",
        "make -j8",
        "../cmake-3.14.0/bin/ctest -VV -j8"
      ]
    }
  }
}
.github/CONTRIBUTING.md (vendored, 8 lines changed)

@@ -4,7 +4,7 @@

This project started as a little excuse to exercise some of the cool new C++11 features. Over time, people actually started to use the JSON library (yey!) and started to help improve it by proposing features, finding bugs, or even fixing my mistakes. I am really [thankful](https://github.com/nlohmann/json/blob/master/README.md#thanks) for this and try to keep track of all the helpers.

To make it as easy as possible for you to contribute and for me to keep an overview, here are a few guidelines which should help us avoid all kinds of unnecessary work or disappointment. And of course, this document is subject to discussion, so please [create an issue](https://github.com/nlohmann/json/issues/new) or a pull request if you find a way to improve it!
To make it as easy as possible for you to contribute and for me to keep an overview, here are a few guidelines which should help us avoid all kinds of unnecessary work or disappointment. And of course, this document is subject to discussion, so please [create an issue](https://github.com/nlohmann/json/issues/new/choose) or a pull request if you find a way to improve it!

## Private reports

@@ -12,7 +12,7 @@ Usually, all issues are tracked publicly on [GitHub](https://github.com/nlohmann

## Prerequisites

Please [create an issue](https://github.com/nlohmann/json/issues/new), assuming one does not already exist, and describe your concern. Note you need a [GitHub account](https://github.com/signup/free) for this.
Please [create an issue](https://github.com/nlohmann/json/issues/new/choose), assuming one does not already exist, and describe your concern. Note you need a [GitHub account](https://github.com/signup/free) for this.

## Describe your issue

@@ -22,7 +22,7 @@ Clearly describe the issue:
- If you propose a change or addition, try to give an **example** how the improved code could look like or how to use it.
- If you found a compilation error, please tell us which **compiler** (version and operating system) you used and paste the (relevant part of) the error messages to the ticket.

Please stick to the [issue template](https://github.com/nlohmann/json/blob/develop/.github/ISSUE_TEMPLATE.md) if possible.
Please stick to the provided issue templates ([bug report](https://github.com/nlohmann/json/blob/develop/.github/ISSUE_TEMPLATE/Bug_report.md), [feature request](https://github.com/nlohmann/json/blob/develop/.github/ISSUE_TEMPLATE/Feature_request.md), or [question](https://github.com/nlohmann/json/blob/develop/.github/ISSUE_TEMPLATE/question.md)) if possible.

## Files to change

@@ -32,7 +32,7 @@ To make changes, you need to edit the following files:

1. [`include/nlohmann/*`](https://github.com/nlohmann/json/tree/develop/include/nlohmann) - These files are the sources of the library. Before testing or creating a pull request, execute `make amalgamate` to regenerate `single_include/nlohmann/json.hpp`.

2. [`test/src/unit-*.cpp`](https://github.com/nlohmann/json/tree/develop/test/src) - These files contain the [Catch](https://github.com/philsquared/Catch) unit tests which currently cover [100 %](https://coveralls.io/github/nlohmann/json) of the library's code.
2. [`test/src/unit-*.cpp`](https://github.com/nlohmann/json/tree/develop/test/src) - These files contain the [doctest](https://github.com/onqtam/doctest) unit tests which currently cover [100 %](https://coveralls.io/github/nlohmann/json) of the library's code.

If you add or change a feature, please also add a unit test to this file. The unit tests can be compiled and executed with
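The concrete build commands are elided in this excerpt. For orientation, here is a minimal sketch of what one of those doctest-based unit tests could look like; the include path, TEST_CASE title, and checked values are illustrative assumptions, not taken from the repository's test suite.

```cpp
// Hypothetical doctest-based unit test for nlohmann::json (illustrative only).
#define DOCTEST_CONFIG_IMPLEMENT_WITH_MAIN
#include "doctest.h"

#include <nlohmann/json.hpp>

using nlohmann::json;

TEST_CASE("parse and access a small object")
{
    // parse a JSON text and check the stored values
    const json j = json::parse(R"({"happy": true, "pi": 3.141})");

    CHECK(j["happy"] == true);
    CHECK(j["pi"].get<double>() == doctest::Approx(3.141));
}
```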

CMakeLists.txt

@@ -4,7 +4,7 @@ cmake_minimum_required(VERSION 3.1)
## PROJECT
## name and version
##
project(nlohmann_json VERSION 3.6.1 LANGUAGES CXX)
project(nlohmann_json VERSION 3.7.0 LANGUAGES CXX)

##
## INCLUDE
@@ -22,10 +22,11 @@ option(JSON_MultipleHeaders "Use non-amalgamated version of the library." OFF)
##
## CONFIGURATION
##
include(GNUInstallDirs)

set(NLOHMANN_JSON_TARGET_NAME ${PROJECT_NAME})
set(NLOHMANN_JSON_CONFIG_INSTALL_DIR "lib/cmake/${PROJECT_NAME}"
    CACHE INTERNAL "")
set(NLOHMANN_JSON_INCLUDE_INSTALL_DIR "include")
set(NLOHMANN_JSON_CONFIG_INSTALL_DIR "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}" CACHE INTERNAL "")
set(NLOHMANN_JSON_INCLUDE_INSTALL_DIR "${CMAKE_INSTALL_INCLUDEDIR}")
set(NLOHMANN_JSON_TARGETS_EXPORT_NAME "${PROJECT_NAME}Targets")
set(NLOHMANN_JSON_CMAKE_CONFIG_TEMPLATE "cmake/config.cmake.in")
set(NLOHMANN_JSON_CMAKE_CONFIG_DIR "${CMAKE_CURRENT_BINARY_DIR}")
ChangeLog.md (146 lines changed)

@@ -1,6 +1,150 @@
# Change Log
|
||||
All notable changes to this project will be documented in this file. This project adheres to [Semantic Versioning](http://semver.org/).
|
||||
|
||||
## [v3.7.0](https://github.com/nlohmann/json/releases/tag/v3.7.0) (2019-07-28)
|
||||
[Full Changelog](https://github.com/nlohmann/json/compare/v3.6.1...v3.7.0)
|
||||
|
||||
- How can I retrieve uknown strings from json file in my C++ program. [\#1684](https://github.com/nlohmann/json/issues/1684)
|
||||
- contains\(\) is sometimes causing stack-based buffer overrun exceptions [\#1683](https://github.com/nlohmann/json/issues/1683)
|
||||
- How to deserialize arrays from json [\#1681](https://github.com/nlohmann/json/issues/1681)
|
||||
- Compilation failed in VS2015 [\#1678](https://github.com/nlohmann/json/issues/1678)
|
||||
- Why the compiled object file is so huge? [\#1677](https://github.com/nlohmann/json/issues/1677)
|
||||
- From Version 2.1.1 to 3.6.1 serialize std::set [\#1676](https://github.com/nlohmann/json/issues/1676)
|
||||
- Qt deprecation model halting compiltion [\#1675](https://github.com/nlohmann/json/issues/1675)
|
||||
- Build For Raspberry pi , Rapbery with new Compiler C++17 [\#1671](https://github.com/nlohmann/json/issues/1671)
|
||||
- Build from Raspberry pi [\#1667](https://github.com/nlohmann/json/issues/1667)
|
||||
- Can not translate map with integer key to dict string ? [\#1664](https://github.com/nlohmann/json/issues/1664)
|
||||
- Double type converts to scientific notation [\#1661](https://github.com/nlohmann/json/issues/1661)
|
||||
- Missing v3.6.1 tag on master branch [\#1657](https://github.com/nlohmann/json/issues/1657)
|
||||
- Support Fleese Binary Data Format [\#1654](https://github.com/nlohmann/json/issues/1654)
|
||||
- Suggestion: replace alternative tokens for !, && and || with their symbols [\#1652](https://github.com/nlohmann/json/issues/1652)
|
||||
- Build failure test-allocator.vcxproj [\#1651](https://github.com/nlohmann/json/issues/1651)
|
||||
- How to provide function json& to\_json\(\) which is similar as 'void to\_json\(json&j, const CObject& obj\)' ? [\#1650](https://github.com/nlohmann/json/issues/1650)
|
||||
- Can't throw exception when starting file is a number [\#1649](https://github.com/nlohmann/json/issues/1649)
|
||||
- to\_json / from\_json with nested type [\#1648](https://github.com/nlohmann/json/issues/1648)
|
||||
- How to create a json object from a std::string, created by j.dump? [\#1645](https://github.com/nlohmann/json/issues/1645)
|
||||
- Problem getting vector \(array\) of strings [\#1644](https://github.com/nlohmann/json/issues/1644)
|
||||
- json.hpp compilation issue with other typedefs with same name [\#1642](https://github.com/nlohmann/json/issues/1642)
|
||||
- nlohmann::adl\_serializer\<T,void\>::to\_json no matching overloaded function found [\#1641](https://github.com/nlohmann/json/issues/1641)
|
||||
- overwrite adl\_serializer\<bool, void\> to change behaviour [\#1638](https://github.com/nlohmann/json/issues/1638)
|
||||
- json.SelectToken\("Manufacturers.Products.Price"\); [\#1637](https://github.com/nlohmann/json/issues/1637)
|
||||
- Add json type as value [\#1636](https://github.com/nlohmann/json/issues/1636)
|
||||
- Unit conversion test error: conversion from 'nlohmann::json' to non-scalar type 'std::string\_view' requested [\#1634](https://github.com/nlohmann/json/issues/1634)
|
||||
- nlohmann VS JsonCpp by C++17 [\#1633](https://github.com/nlohmann/json/issues/1633)
|
||||
- To integrate an inline helper function that return type name as string [\#1632](https://github.com/nlohmann/json/issues/1632)
|
||||
- Return JSON as reference [\#1631](https://github.com/nlohmann/json/issues/1631)
|
||||
- Updating from an older version causes problems with assing a json object to a struct [\#1630](https://github.com/nlohmann/json/issues/1630)
|
||||
- Can without default constructor function for user defined classes when only to\_json is needed? [\#1629](https://github.com/nlohmann/json/issues/1629)
|
||||
- Compilation fails with clang 6.x-8.x in C++14 mode [\#1628](https://github.com/nlohmann/json/issues/1628)
|
||||
- Treating floating point as string [\#1627](https://github.com/nlohmann/json/issues/1627)
|
||||
- error parsing character å [\#1626](https://github.com/nlohmann/json/issues/1626)
|
||||
- \[Help\] How to Improve Json Output Performance with Large Json Arrays [\#1624](https://github.com/nlohmann/json/issues/1624)
|
||||
- Suggested link changes for reporting new issues \[blob/develop/REAME.md and blob/develop/.github/CONTRIBUTING.md\] [\#1623](https://github.com/nlohmann/json/issues/1623)
|
||||
- Broken link to issue template in CONTRIBUTING.md [\#1622](https://github.com/nlohmann/json/issues/1622)
|
||||
- Missing word in README.md file [\#1621](https://github.com/nlohmann/json/issues/1621)
|
||||
- Package manager instructions in README for brew is incorrect [\#1620](https://github.com/nlohmann/json/issues/1620)
|
||||
- Building with Visual Studio 2019 [\#1619](https://github.com/nlohmann/json/issues/1619)
|
||||
- Precedence of to\_json and builtin harmful [\#1617](https://github.com/nlohmann/json/issues/1617)
|
||||
- The type json is missing from the html documentation [\#1616](https://github.com/nlohmann/json/issues/1616)
|
||||
- variant is not support in Release 3.6.1? [\#1615](https://github.com/nlohmann/json/issues/1615)
|
||||
- Replace assert with throw for const operator\[\] [\#1614](https://github.com/nlohmann/json/issues/1614)
|
||||
- Memory Overhead is Too High \(10x or more\) [\#1613](https://github.com/nlohmann/json/issues/1613)
|
||||
- program crash everytime, when other data type incomming in json stream as expected [\#1612](https://github.com/nlohmann/json/issues/1612)
|
||||
- Improved Enum Support [\#1611](https://github.com/nlohmann/json/issues/1611)
|
||||
- is it possible convert json object back to stl container ? [\#1610](https://github.com/nlohmann/json/issues/1610)
|
||||
- Add C++17-like emplace.back\(\) for arrays. [\#1609](https://github.com/nlohmann/json/issues/1609)
|
||||
- is\_nothrow\_copy\_constructible fails for json::const\_iterator on MSVC2015 x86 Debug build [\#1608](https://github.com/nlohmann/json/issues/1608)
|
||||
- Reading and writing array elements [\#1607](https://github.com/nlohmann/json/issues/1607)
|
||||
- Converting json::value to int [\#1605](https://github.com/nlohmann/json/issues/1605)
|
||||
- I have a vector of keys and and a string of value and i want to create nested json array [\#1604](https://github.com/nlohmann/json/issues/1604)
|
||||
- In compatible JSON object from nlohmann::json to nohman::json - unexpected end of input; expected '\[', '{', or a literal [\#1603](https://github.com/nlohmann/json/issues/1603)
|
||||
- json parser crash if having a large number integer in message [\#1602](https://github.com/nlohmann/json/issues/1602)
|
||||
- Value method with undocumented throwing 302 exception [\#1601](https://github.com/nlohmann/json/issues/1601)
|
||||
- Accessing value with json pointer adds key if not existing [\#1600](https://github.com/nlohmann/json/issues/1600)
|
||||
- README.md broken link to project documentation [\#1597](https://github.com/nlohmann/json/issues/1597)
|
||||
- Random Kudos: Thanks for your work on this! [\#1596](https://github.com/nlohmann/json/issues/1596)
|
||||
- json::parse return value and errors [\#1595](https://github.com/nlohmann/json/issues/1595)
|
||||
- initializer list constructor makes curly brace initialization fragile [\#1594](https://github.com/nlohmann/json/issues/1594)
|
||||
- trying to log message for missing keyword, difference between \["foo"\] and at\("foo"\) [\#1593](https://github.com/nlohmann/json/issues/1593)
|
||||
- std::string and std::wstring `to\_json` [\#1592](https://github.com/nlohmann/json/issues/1592)
|
||||
- I have a C structure which I need to convert to a JSON. How do I do it? Haven't found proper examples so far. [\#1591](https://github.com/nlohmann/json/issues/1591)
|
||||
- dump\_escaped possible error ? [\#1589](https://github.com/nlohmann/json/issues/1589)
|
||||
- json::parse\(\) into a vector\<string\> results in unhandled exception [\#1587](https://github.com/nlohmann/json/issues/1587)
|
||||
- push\_back\(\)/emplace\_back\(\) on array invalidates pointers to existing array items [\#1586](https://github.com/nlohmann/json/issues/1586)
|
||||
- Getting nlohmann::detail::parse\_error on JSON generated by nlohmann::json not sure why [\#1583](https://github.com/nlohmann/json/issues/1583)
|
||||
- getting error terminate called after throwing an instance of 'std::domain\_error' what\(\): cannot use at\(\) with string [\#1582](https://github.com/nlohmann/json/issues/1582)
|
||||
- how i create json file [\#1581](https://github.com/nlohmann/json/issues/1581)
|
||||
- prevent rounding of double datatype values [\#1580](https://github.com/nlohmann/json/issues/1580)
|
||||
- Documentation Container Overview Doesn't Reference Const Methods [\#1579](https://github.com/nlohmann/json/issues/1579)
|
||||
- Writing an array into a nlohmann::json object [\#1578](https://github.com/nlohmann/json/issues/1578)
|
||||
- compilation error when using with another library [\#1577](https://github.com/nlohmann/json/issues/1577)
|
||||
- Homebrew on OSX doesn't install cmake config file [\#1576](https://github.com/nlohmann/json/issues/1576)
|
||||
- `unflatten` vs objects with number-ish keys [\#1575](https://github.com/nlohmann/json/issues/1575)
|
||||
- JSON Parse Out of Range Error [\#1574](https://github.com/nlohmann/json/issues/1574)
|
||||
- Integrating into existing CMake Project [\#1573](https://github.com/nlohmann/json/issues/1573)
|
||||
- A "thinner" source code tar as part of release? [\#1572](https://github.com/nlohmann/json/issues/1572)
|
||||
- conversion to std::string failed [\#1571](https://github.com/nlohmann/json/issues/1571)
|
||||
- jPtr operation does not throw [\#1569](https://github.com/nlohmann/json/issues/1569)
|
||||
- How to generate dll file for this project [\#1568](https://github.com/nlohmann/json/issues/1568)
|
||||
- how to pass variable data to json in c [\#1567](https://github.com/nlohmann/json/issues/1567)
|
||||
- I want to achieve an upgraded function. [\#1566](https://github.com/nlohmann/json/issues/1566)
|
||||
- How to determine the type of elements read from a JSON array? [\#1564](https://github.com/nlohmann/json/issues/1564)
|
||||
- try\_get\_to [\#1563](https://github.com/nlohmann/json/issues/1563)
|
||||
- example code compile error [\#1562](https://github.com/nlohmann/json/issues/1562)
|
||||
- How to iterate over nested json object [\#1561](https://github.com/nlohmann/json/issues/1561)
|
||||
- Build Option/Separate Function to Allow to Throw on Duplicate Keys [\#1560](https://github.com/nlohmann/json/issues/1560)
|
||||
- Compiler Switches -Weffc++ & -Wshadow are throwing errors [\#1558](https://github.com/nlohmann/json/issues/1558)
|
||||
- warning: use of the 'nodiscard' attribute is a C++17 extension [\#1557](https://github.com/nlohmann/json/issues/1557)
|
||||
- Import/Export compressed JSON files [\#1556](https://github.com/nlohmann/json/issues/1556)
|
||||
- GDB renderers for json library [\#1554](https://github.com/nlohmann/json/issues/1554)
|
||||
- Is it possible to construct a json string object from a binary buffer? [\#1553](https://github.com/nlohmann/json/issues/1553)
|
||||
- json objects in list [\#1552](https://github.com/nlohmann/json/issues/1552)
|
||||
- Matrix output [\#1550](https://github.com/nlohmann/json/issues/1550)
|
||||
- Using json merge\_patch on ordered non-alphanumeric datasets [\#1549](https://github.com/nlohmann/json/issues/1549)
|
||||
- Invalid parsed value for big integer [\#1548](https://github.com/nlohmann/json/issues/1548)
|
||||
- Integrating with android ndk issues. [\#1547](https://github.com/nlohmann/json/issues/1547)
|
||||
- add noexcept json::value\("key", default\) method variant? [\#1546](https://github.com/nlohmann/json/issues/1546)
|
||||
- Thank you! 🙌 [\#1545](https://github.com/nlohmann/json/issues/1545)
|
||||
- Output and input matrix [\#1544](https://github.com/nlohmann/json/issues/1544)
|
||||
- Add regression tests for MSVC [\#1543](https://github.com/nlohmann/json/issues/1543)
|
||||
- \[Help Needed!\] Season of Docs [\#1542](https://github.com/nlohmann/json/issues/1542)
|
||||
- program still abort\(\) or exit\(\) with try catch [\#1541](https://github.com/nlohmann/json/issues/1541)
|
||||
- Have a json::type\_error exception because of JSON object [\#1540](https://github.com/nlohmann/json/issues/1540)
|
||||
- Using versioned namespaces [\#1539](https://github.com/nlohmann/json/issues/1539)
|
||||
- Quoted numbers [\#1538](https://github.com/nlohmann/json/issues/1538)
|
||||
- Reading a JSON file into an object [\#1537](https://github.com/nlohmann/json/issues/1537)
|
||||
- Releases 3.6.0 and 3.6.1 don't build on conda / windows [\#1536](https://github.com/nlohmann/json/issues/1536)
|
||||
- \[Clang\] warning: use of the 'nodiscard' attribute is a C++17 extension \[-Wc++17-extensions\] [\#1535](https://github.com/nlohmann/json/issues/1535)
|
||||
- wchar\_t/std::wstring json can be created but not accessed [\#1533](https://github.com/nlohmann/json/issues/1533)
|
||||
- json stringify [\#1532](https://github.com/nlohmann/json/issues/1532)
|
||||
- How can I use std::string\_view as the json\_key to "operator \[\]" ? [\#1529](https://github.com/nlohmann/json/issues/1529)
|
||||
- How can I use it from gcc on RPI [\#1528](https://github.com/nlohmann/json/issues/1528)
|
||||
- std::pair treated as an array instead of key-value in `std::vector\<std::pair\<\>\>` [\#1520](https://github.com/nlohmann/json/issues/1520)
|
||||
- Excessive Memory Usage for Large Json File [\#1516](https://github.com/nlohmann/json/issues/1516)
|
||||
- SAX dumper [\#1512](https://github.com/nlohmann/json/issues/1512)
|
||||
- Conversion to user type containing a std::vector not working with documented approach [\#1511](https://github.com/nlohmann/json/issues/1511)
|
||||
- How to get position info or parser context with custom from\_json\(\) that may throw exceptions? [\#1508](https://github.com/nlohmann/json/issues/1508)
|
||||
- Inconsistent use of type alias. [\#1507](https://github.com/nlohmann/json/issues/1507)
|
||||
- Is there a current way to represent strings as json int? [\#1503](https://github.com/nlohmann/json/issues/1503)
|
||||
- Intermittent issues with loadJSON [\#1484](https://github.com/nlohmann/json/issues/1484)
|
||||
- use json construct std::string [\#1462](https://github.com/nlohmann/json/issues/1462)
|
||||
- JSON Creation [\#1461](https://github.com/nlohmann/json/issues/1461)
|
||||
- Substantial performance penalty caused by polymorphic input adapter [\#1457](https://github.com/nlohmann/json/issues/1457)
|
||||
- Null bytes in files are treated like EOF [\#1095](https://github.com/nlohmann/json/issues/1095)
|
||||
- Feature: to\_string\(const json& j\); [\#916](https://github.com/nlohmann/json/issues/916)
|
||||
|
||||
- Use GNUInstallDirs instead of hard-coded path. [\#1673](https://github.com/nlohmann/json/pull/1673) ([remyabel](https://github.com/remyabel))
|
||||
- Package Manager: MSYS2 \(pacman\) [\#1670](https://github.com/nlohmann/json/pull/1670) ([podsvirov](https://github.com/podsvirov))
|
||||
- Fix json.hpp compilation issue with other typedefs with same name \(Issue \#1642\) [\#1643](https://github.com/nlohmann/json/pull/1643) ([kevinlul](https://github.com/kevinlul))
|
||||
- Add explicit conversion from json to std::string\_view in conversion unit test [\#1639](https://github.com/nlohmann/json/pull/1639) ([taylorhoward92](https://github.com/taylorhoward92))
|
||||
- Minor fixes in docs [\#1625](https://github.com/nlohmann/json/pull/1625) ([nickaein](https://github.com/nickaein))
|
||||
- Fix broken links to documentation [\#1598](https://github.com/nlohmann/json/pull/1598) ([nickaein](https://github.com/nickaein))
|
||||
- Added to\_string and added basic tests [\#1585](https://github.com/nlohmann/json/pull/1585) ([Macr0Nerd](https://github.com/Macr0Nerd))
|
||||
- Regression tests for MSVC [\#1570](https://github.com/nlohmann/json/pull/1570) ([nickaein](https://github.com/nickaein))
|
||||
- Fix/1511 [\#1555](https://github.com/nlohmann/json/pull/1555) ([theodelrieu](https://github.com/theodelrieu))
|
||||
- Remove C++17 extension warning from clang; \#1535 [\#1551](https://github.com/nlohmann/json/pull/1551) ([heavywatal](https://github.com/heavywatal))
|
||||
- moved from Catch to doctest for unit tests [\#1439](https://github.com/nlohmann/json/pull/1439) ([onqtam](https://github.com/onqtam))
|
||||
|
||||
## [v3.6.1](https://github.com/nlohmann/json/releases/tag/v3.6.1) (2019-03-20)
|
||||
[Full Changelog](https://github.com/nlohmann/json/compare/v3.6.0...v3.6.1)
|
||||
|
||||
|
@@ -201,7 +345,7 @@ All notable changes to this project will be documented in this file. This projec
|
|||
- Fix merge\_patch shadow warning [\#1346](https://github.com/nlohmann/json/pull/1346) ([ax3l](https://github.com/ax3l))
|
||||
- Allow installation via Meson [\#1345](https://github.com/nlohmann/json/pull/1345) ([mpoquet](https://github.com/mpoquet))
|
||||
- Set eofbit on exhausted input stream. [\#1343](https://github.com/nlohmann/json/pull/1343) ([mefyl](https://github.com/mefyl))
|
||||
- Add a SFINAE friendly iterator\_traits and use that instead. [\#1342](https://github.com/nlohmann/json/pull/1342) ([davedissian](https://github.com/davedissian))
|
||||
- Add a SFINAE friendly iterator\_traits and use that instead. [\#1342](https://github.com/nlohmann/json/pull/1342) ([dgavedissian](https://github.com/dgavedissian))
|
||||
- Fix EOL Whitespaces & CMake Spelling [\#1329](https://github.com/nlohmann/json/pull/1329) ([ax3l](https://github.com/ax3l))
|
||||
|
||||
## [v3.4.0](https://github.com/nlohmann/json/releases/tag/v3.4.0) (2018-10-30)
|
||||
|
|
Makefile (270 lines changed)

@@ -1,44 +1,31 @@
.PHONY: pretty clean ChangeLog.md
|
||||
.PHONY: pretty clean ChangeLog.md release
|
||||
|
||||
SRCS = include/nlohmann/json.hpp \
|
||||
include/nlohmann/json_fwd.hpp \
|
||||
include/nlohmann/adl_serializer.hpp \
|
||||
include/nlohmann/detail/conversions/from_json.hpp \
|
||||
include/nlohmann/detail/conversions/to_chars.hpp \
|
||||
include/nlohmann/detail/conversions/to_json.hpp \
|
||||
include/nlohmann/detail/exceptions.hpp \
|
||||
include/nlohmann/detail/input/binary_reader.hpp \
|
||||
include/nlohmann/detail/input/input_adapters.hpp \
|
||||
include/nlohmann/detail/input/json_sax.hpp \
|
||||
include/nlohmann/detail/input/lexer.hpp \
|
||||
include/nlohmann/detail/input/parser.hpp \
|
||||
include/nlohmann/detail/input/position_t.hpp \
|
||||
include/nlohmann/detail/iterators/internal_iterator.hpp \
|
||||
include/nlohmann/detail/iterators/iter_impl.hpp \
|
||||
include/nlohmann/detail/iterators/iteration_proxy.hpp \
|
||||
include/nlohmann/detail/iterators/json_reverse_iterator.hpp \
|
||||
include/nlohmann/detail/iterators/primitive_iterator.hpp \
|
||||
include/nlohmann/detail/json_pointer.hpp \
|
||||
include/nlohmann/detail/json_ref.hpp \
|
||||
include/nlohmann/detail/macro_scope.hpp \
|
||||
include/nlohmann/detail/macro_unscope.hpp \
|
||||
include/nlohmann/detail/meta/cpp_future.hpp \
|
||||
include/nlohmann/detail/meta/detected.hpp \
|
||||
include/nlohmann/detail/meta/type_traits.hpp \
|
||||
include/nlohmann/detail/meta/void_t.hpp \
|
||||
include/nlohmann/detail/output/binary_writer.hpp \
|
||||
include/nlohmann/detail/output/output_adapters.hpp \
|
||||
include/nlohmann/detail/output/serializer.hpp \
|
||||
include/nlohmann/detail/value_t.hpp
|
||||
|
||||
UNAME = $(shell uname)
|
||||
CXX=clang++
|
||||
|
||||
AMALGAMATED_FILE=single_include/nlohmann/json.hpp
|
||||
##########################################################################
|
||||
# configuration
|
||||
##########################################################################
|
||||
|
||||
# directory to recent compiler binaries
|
||||
COMPILER_DIR=/Users/niels/Documents/projects/compilers/local/bin
|
||||
|
||||
# find GNU sed to use `-i` parameter
|
||||
SED:=$(shell command -v gsed || which sed)
|
||||
|
||||
|
||||
##########################################################################
|
||||
# source files
|
||||
##########################################################################
|
||||
|
||||
# the list of sources in the include folder
|
||||
SRCS=$(shell find include -type f | sort)
|
||||
|
||||
# the single header (amalgamated from the source files)
|
||||
AMALGAMATED_FILE=single_include/nlohmann/json.hpp
|
||||
|
||||
|
||||
##########################################################################
|
||||
# documentation of the Makefile's targets
|
||||
##########################################################################
|
||||
|
||||
# main target
|
||||
all:
|
||||
@echo "amalgamate - amalgamate file single_include/nlohmann/json.hpp from the include/nlohmann sources"
|
||||
|
@@ -48,6 +35,7 @@ all:
|
|||
@echo "check-fast - compile and execute test suite (skip long-running tests)"
|
||||
@echo "clean - remove built files"
|
||||
@echo "coverage - create coverage information with lcov"
|
||||
@echo "coverage-fast - create coverage information with fastcov"
|
||||
@echo "cppcheck - analyze code with cppcheck"
|
||||
@echo "cpplint - analyze code with cpplint"
|
||||
@echo "clang_tidy - analyze code with Clang-Tidy"
|
||||
|
@@ -64,6 +52,7 @@ all:
|
|||
@echo "pretty - beautify code with Artistic Style"
|
||||
@echo "run_benchmarks - build and run benchmarks"
|
||||
|
||||
|
||||
##########################################################################
|
||||
# unit tests
|
||||
##########################################################################
|
||||
|
@@ -76,30 +65,32 @@ json_unit:
|
|||
check:
|
||||
$(MAKE) check -C test
|
||||
|
||||
# run unit tests and skip expensive tests
|
||||
check-fast:
|
||||
$(MAKE) check -C test TEST_PATTERN=""
|
||||
|
||||
# clean up
|
||||
clean:
|
||||
rm -fr json_unit json_benchmarks fuzz fuzz-testing *.dSYM test/*.dSYM
|
||||
rm -fr benchmarks/files/numbers/*.json
|
||||
rm -fr build_coverage build_benchmarks
|
||||
$(MAKE) clean -Cdoc
|
||||
$(MAKE) clean -Ctest
|
||||
|
||||
|
||||
##########################################################################
|
||||
# coverage
|
||||
##########################################################################
|
||||
|
||||
coverage:
|
||||
rm -fr build_coverage
|
||||
mkdir build_coverage
|
||||
cd build_coverage ; CXX=g++-7 cmake .. -GNinja -DJSON_Coverage=ON -DJSON_MultipleHeaders=ON
|
||||
cd build_coverage ; CXX=g++-8 cmake .. -GNinja -DJSON_Coverage=ON -DJSON_MultipleHeaders=ON
|
||||
cd build_coverage ; ninja
|
||||
cd build_coverage ; ctest -E '.*_default' -j10
|
||||
cd build_coverage ; ninja lcov_html
|
||||
open build_coverage/test/html/index.html
|
||||
|
||||
coverage-fast:
|
||||
rm -fr build_coverage
|
||||
mkdir build_coverage
|
||||
cd build_coverage ; CXX=g++-9 cmake .. -GNinja -DJSON_Coverage=ON -DJSON_MultipleHeaders=ON
|
||||
cd build_coverage ; ninja
|
||||
cd build_coverage ; ctest -E '.*_default' -j10
|
||||
cd build_coverage ; ninja fastcov_html
|
||||
open build_coverage/test/html/index.html
|
||||
|
||||
##########################################################################
|
||||
# documentation tests
|
||||
|
@@ -115,36 +106,35 @@ doctest:
|
|||
##########################################################################
|
||||
|
||||
# calling Clang with all warnings, except:
|
||||
# -Wno-documentation-unknown-command: code uses user-defined commands like @complexity
|
||||
# -Wno-exit-time-destructors: warning in Catch code
|
||||
# -Wno-keyword-macro: unit-tests use "#define private public"
|
||||
# -Wno-deprecated-declarations: the library deprecated some functions
|
||||
# -Wno-weak-vtables: exception class is defined inline, but has virtual method
|
||||
# -Wno-range-loop-analysis: items tests "for(const auto i...)"
|
||||
# -Wno-float-equal: not all comparisons in the tests can be replaced by Approx
|
||||
# -Wno-switch-enum -Wno-covered-switch-default: pedantic/contradicting warnings about switches
|
||||
# -Wno-c++2a-compat: u8 literals will behave differently in C++20...
|
||||
# -Wno-deprecated-declarations: the library deprecated some functions
|
||||
# -Wno-documentation-unknown-command: code uses user-defined commands like @complexity
|
||||
# -Wno-exit-time-destructors: warning in json code triggered by NLOHMANN_JSON_SERIALIZE_ENUM
|
||||
# -Wno-float-equal: not all comparisons in the tests can be replaced by Approx
|
||||
# -Wno-keyword-macro: unit-tests use "#define private public"
|
||||
# -Wno-padded: padding is nothing to warn about
|
||||
# -Wno-range-loop-analysis: items tests "for(const auto i...)"
|
||||
# -Wno-switch-enum -Wno-covered-switch-default: pedantic/contradicting warnings about switches
|
||||
# -Wno-weak-vtables: exception class is defined inline, but has virtual method
|
||||
pedantic_clang:
|
||||
$(MAKE) json_unit CXX=$(COMPILER_DIR)/clang++ CXXFLAGS=" \
|
||||
$(MAKE) json_unit CXX=c++ CXXFLAGS=" \
|
||||
-std=c++11 -Wno-c++98-compat -Wno-c++98-compat-pedantic \
|
||||
-Werror \
|
||||
-Weverything \
|
||||
-Wno-c++2a-compat \
|
||||
-Wno-deprecated-declarations \
|
||||
-Wno-documentation-unknown-command \
|
||||
-Wno-exit-time-destructors \
|
||||
-Wno-keyword-macro \
|
||||
-Wno-deprecated-declarations \
|
||||
-Wno-weak-vtables \
|
||||
-Wno-range-loop-analysis \
|
||||
-Wno-float-equal \
|
||||
-Wno-keyword-macro \
|
||||
-Wno-padded \
|
||||
-Wno-range-loop-analysis \
|
||||
-Wno-switch-enum -Wno-covered-switch-default \
|
||||
-Wno-c++2a-compat \
|
||||
-Wno-c++17-extensions \
|
||||
-Wno-padded"
|
||||
-Wno-weak-vtables"
|
||||
|
||||
# calling GCC with most warnings
|
||||
pedantic_gcc:
|
||||
$(MAKE) json_unit CXX=$(COMPILER_DIR)/g++ CXXFLAGS=" \
|
||||
$(MAKE) json_unit CXX=/usr/local/bin/g++-9 CXXFLAGS=" \
|
||||
-std=c++11 \
|
||||
-Waddress \
|
||||
-Waddress-of-packed-member \
|
||||
|
@@ -201,7 +191,6 @@ pedantic_gcc:
|
|||
-Wextra \
|
||||
-Wextra-semi \
|
||||
-Wfloat-conversion \
|
||||
-Wfloat-equal \
|
||||
-Wformat \
|
||||
-Wformat-contains-nul \
|
||||
-Wformat-extra-args \
|
||||
|
@@ -251,6 +240,7 @@ pedantic_gcc:
|
|||
-Wmultistatement-macros \
|
||||
-Wnarrowing \
|
||||
-Wno-deprecated-declarations \
|
||||
-Wno-float-equal \
|
||||
-Wno-long-long \
|
||||
-Wno-namespaces \
|
||||
-Wno-padded \
|
||||
|
@@ -372,9 +362,10 @@ pedantic_gcc:
|
|||
##########################################################################
|
||||
|
||||
run_benchmarks:
|
||||
rm -fr build_benchmarks
|
||||
mkdir build_benchmarks
|
||||
cd build_benchmarks ; cmake ../benchmarks
|
||||
cd build_benchmarks ; make
|
||||
cd build_benchmarks ; cmake ../benchmarks -GNinja -DCMAKE_BUILD_TYPE=Release
|
||||
cd build_benchmarks ; ninja
|
||||
cd build_benchmarks ; ./json_benchmarks
|
||||
|
||||
##########################################################################
|
||||
|
@@ -436,28 +427,39 @@ fuzzing-stop:
|
|||
-killall fuzzer
|
||||
-killall afl-fuzz
|
||||
|
||||
|
||||
##########################################################################
|
||||
# static analyzer
|
||||
# Static analysis
|
||||
##########################################################################
|
||||
|
||||
# call cppcheck on the main header file
|
||||
# call cppcheck <http://cppcheck.sourceforge.net>
|
||||
# Note: this target is called by Travis
|
||||
cppcheck:
|
||||
cppcheck --enable=warning --inline-suppr --inconclusive --force --std=c++11 $(SRCS) --error-exitcode=1
|
||||
|
||||
# compile and check with Clang Static Analyzer
|
||||
# call Clang Static Analyzer <https://clang-analyzer.llvm.org>
|
||||
clang_analyze:
|
||||
rm -fr clang_analyze_build
|
||||
mkdir clang_analyze_build
|
||||
cd clang_analyze_build ; CCC_CXX=$(COMPILER_DIR)/clang++ CXX=$(COMPILER_DIR)/clang++ $(COMPILER_DIR)/scan-build cmake .. -GNinja
|
||||
cd clang_analyze_build ; $(COMPILER_DIR)/scan-build -enable-checker alpha.core.BoolAssignment,alpha.core.CallAndMessageUnInitRefArg,alpha.core.CastSize,alpha.core.CastToStruct,alpha.core.Conversion,alpha.core.DynamicTypeChecker,alpha.core.FixedAddr,alpha.core.PointerArithm,alpha.core.PointerSub,alpha.core.SizeofPtr,alpha.core.StackAddressAsyncEscape,alpha.core.TestAfterDivZero,alpha.deadcode.UnreachableCode,core.builtin.BuiltinFunctions,core.builtin.NoReturnFunctions,core.CallAndMessage,core.DivideZero,core.DynamicTypePropagation,core.NonnilStringConstants,core.NonNullParamChecker,core.NullDereference,core.StackAddressEscape,core.UndefinedBinaryOperatorResult,core.uninitialized.ArraySubscript,core.uninitialized.Assign,core.uninitialized.Branch,core.uninitialized.CapturedBlockVariable,core.uninitialized.UndefReturn,core.VLASize,cplusplus.InnerPointer,cplusplus.Move,cplusplus.NewDelete,cplusplus.NewDeleteLeaks,cplusplus.SelfAssignment,deadcode.DeadStores,nullability.NullableDereferenced,nullability.NullablePassedToNonnull,nullability.NullableReturnedFromNonnull,nullability.NullPassedToNonnull,nullability.NullReturnedFromNonnull --use-c++=$(COMPILER_DIR)/clang++ --view -analyze-headers -o clang_analyze_build/report.html ninja
|
||||
cd clang_analyze_build ; \
|
||||
$(COMPILER_DIR)/scan-build \
|
||||
-enable-checker alpha.core.BoolAssignment,alpha.core.CallAndMessageUnInitRefArg,alpha.core.CastSize,alpha.core.CastToStruct,alpha.core.Conversion,alpha.core.DynamicTypeChecker,alpha.core.FixedAddr,alpha.core.PointerArithm,alpha.core.PointerSub,alpha.core.SizeofPtr,alpha.core.StackAddressAsyncEscape,alpha.core.TestAfterDivZero,alpha.deadcode.UnreachableCode,core.builtin.BuiltinFunctions,core.builtin.NoReturnFunctions,core.CallAndMessage,core.DivideZero,core.DynamicTypePropagation,core.NonnilStringConstants,core.NonNullParamChecker,core.NullDereference,core.StackAddressEscape,core.UndefinedBinaryOperatorResult,core.uninitialized.ArraySubscript,core.uninitialized.Assign,core.uninitialized.Branch,core.uninitialized.CapturedBlockVariable,core.uninitialized.UndefReturn,core.VLASize,cplusplus.InnerPointer,cplusplus.Move,cplusplus.NewDelete,cplusplus.NewDeleteLeaks,cplusplus.SelfAssignment,deadcode.DeadStores,nullability.NullableDereferenced,nullability.NullablePassedToNonnull,nullability.NullableReturnedFromNonnull,nullability.NullPassedToNonnull,nullability.NullReturnedFromNonnull \
|
||||
--use-c++=$(COMPILER_DIR)/clang++ -analyze-headers -o report ninja
|
||||
open clang_analyze_build/report/*/index.html
|
||||
|
||||
# call cpplint (some errors expected due to false positives)
|
||||
# call cpplint <https://github.com/cpplint/cpplint>
|
||||
# Note: some errors expected due to false positives
|
||||
cpplint:
|
||||
third_party/cpplint/cpplint.py --filter=-whitespace,-legal,-readability/alt_tokens,-runtime/references,-runtime/explicit --quiet --recursive include
|
||||
third_party/cpplint/cpplint.py \
|
||||
--filter=-whitespace,-legal,-readability/alt_tokens,-runtime/references,-runtime/explicit \
|
||||
--quiet --recursive $(SRCS)
|
||||
|
||||
# call Clang-Tidy <https://clang.llvm.org/extra/clang-tidy/>
|
||||
clang_tidy:
|
||||
$(COMPILER_DIR)/clang-tidy $(SRCS) -- -Iinclude -std=c++11
|
||||
|
||||
# call PVS-Studio Analyzer <https://www.viva64.com/en/pvs-studio/>
|
||||
pvs_studio:
|
||||
rm -fr pvs_studio_build
|
||||
mkdir pvs_studio_build
|
||||
|
@@ -466,45 +468,70 @@ pvs_studio:
|
|||
cd pvs_studio_build ; plog-converter -a'GA:1,2;64:1;CS' -t fullhtml PVS-Studio.log -o pvs
|
||||
open pvs_studio_build/pvs/index.html
|
||||
|
||||
# call Infer <https://fbinfer.com> static analyzer
|
||||
infer:
|
||||
rm -fr infer_build
|
||||
mkdir infer_build
|
||||
cd infer_build ; infer compile -- cmake .. ; infer run -- make -j 4
|
||||
|
||||
# call OCLint <http://oclint.org> static analyzer
|
||||
oclint:
|
||||
oclint $(SRCS) -report-type html -enable-global-analysis -o oclint_report.html -max-priority-1=10000 -max-priority-2=10000 -max-priority-3=10000 -- -std=c++11 -Iinclude
|
||||
open oclint_report.html
|
||||
|
||||
# execute the test suite with Clang sanitizers (address and undefined behavior)
|
||||
clang_sanitize:
|
||||
rm -fr clang_sanitize_build
|
||||
mkdir clang_sanitize_build
|
||||
cd clang_sanitize_build ; CXX=$(COMPILER_DIR)/clang++ cmake .. -DJSON_Sanitizer=On -DJSON_MultipleHeaders=ON -GNinja
|
||||
cd clang_sanitize_build ; ninja
|
||||
cd clang_sanitize_build ; ctest -E '.*_default' -j10
|
||||
|
||||
|
||||
##########################################################################
|
||||
# maintainer targets
|
||||
# Code format and source amalgamation
|
||||
##########################################################################
|
||||
|
||||
# pretty printer
|
||||
# call the Artistic Style pretty printer on all source files
|
||||
pretty:
|
||||
astyle --style=allman --indent=spaces=4 --indent-modifiers \
|
||||
--indent-switches --indent-preproc-block --indent-preproc-define \
|
||||
--indent-col1-comments --pad-oper --pad-header --align-pointer=type \
|
||||
--align-reference=type --add-brackets --convert-tabs --close-templates \
|
||||
--lineend=linux --preserve-date --suffix=none --formatted \
|
||||
$(SRCS) $(AMALGAMATED_FILE) test/src/*.cpp \
|
||||
benchmarks/src/benchmarks.cpp doc/examples/*.cpp
|
||||
astyle \
|
||||
--style=allman \
|
||||
--indent=spaces=4 \
|
||||
--indent-modifiers \
|
||||
--indent-switches \
|
||||
--indent-preproc-block \
|
||||
--indent-preproc-define \
|
||||
--indent-col1-comments \
|
||||
--pad-oper \
|
||||
--pad-header \
|
||||
--align-pointer=type \
|
||||
--align-reference=type \
|
||||
--add-brackets \
|
||||
--convert-tabs \
|
||||
--close-templates \
|
||||
--lineend=linux \
|
||||
--preserve-date \
|
||||
--suffix=none \
|
||||
--formatted \
|
||||
$(SRCS) $(AMALGAMATED_FILE) test/src/*.cpp benchmarks/src/benchmarks.cpp doc/examples/*.cpp
|
||||
|
||||
# create single header file
|
||||
amalgamate: $(AMALGAMATED_FILE)
|
||||
|
||||
# call the amalgamation tool and pretty print
|
||||
$(AMALGAMATED_FILE): $(SRCS)
|
||||
third_party/amalgamate/amalgamate.py -c third_party/amalgamate/config.json -s . --verbose=yes
|
||||
$(MAKE) pretty
|
||||
|
||||
# check if single_include/nlohmann/json.hpp has been amalgamated from the nlohmann sources
|
||||
# check if file single_include/nlohmann/json.hpp has been amalgamated from the nlohmann sources
|
||||
# Note: this target is called by Travis
|
||||
check-amalgamation:
|
||||
@mv $(AMALGAMATED_FILE) $(AMALGAMATED_FILE)~
|
||||
@$(MAKE) amalgamate
|
||||
@diff $(AMALGAMATED_FILE) $(AMALGAMATED_FILE)~ || (echo "===================================================================\n Amalgamation required! Please read the contribution guidelines\n in file .github/CONTRIBUTING.md.\n===================================================================" ; mv $(AMALGAMATED_FILE)~ $(AMALGAMATED_FILE) ; false)
|
||||
@mv $(AMALGAMATED_FILE)~ $(AMALGAMATED_FILE)
|
||||
|
||||
# check if every header in nlohmann includes sufficient headers to be compiled
|
||||
# individually
|
||||
# check if every header in nlohmann includes sufficient headers to be compiled individually
|
||||
check-single-includes:
|
||||
@for x in $(SRCS); do \
|
||||
echo "Checking self-sufficiency of $$x..." ; \
|
||||
|
@@ -513,29 +540,90 @@ check-single-includes:
|
|||
rm -f single_include_test.cpp single_include_test; \
|
||||
done
|
||||
|
||||
|
||||
##########################################################################
|
||||
# changelog
|
||||
# CMake
|
||||
##########################################################################
|
||||
|
||||
# grep "^option" CMakeLists.txt test/CMakeLists.txt | sed 's/(/ /' | awk '{print $2}' | xargs
|
||||
|
||||
# check if all flags of our CMake files work
|
||||
check_cmake_flags_do:
|
||||
$(CMAKE_BINARY) --version
|
||||
for flag in '' JSON_BuildTests JSON_Install JSON_MultipleHeaders JSON_Sanitizer JSON_Valgrind JSON_NoExceptions JSON_Coverage; do \
|
||||
rm -fr cmake_build; \
|
||||
mkdir cmake_build; \
|
||||
echo "$(CMAKE_BINARY) .. -D$$flag=On" ; \
|
||||
cd cmake_build ; \
|
||||
CXX=g++-8 $(CMAKE_BINARY) .. -D$$flag=On -DCMAKE_CXX_COMPILE_FEATURES="cxx_std_11;cxx_range_for" -DCMAKE_CXX_FLAGS="-std=gnu++11" ; \
|
||||
test -f Makefile || exit 1 ; \
|
||||
cd .. ; \
|
||||
done;
|
||||
|
||||
# call target `check_cmake_flags_do` twice: once for minimal required CMake version 3.1.0 and once for the installed version
|
||||
check_cmake_flags:
|
||||
wget https://github.com/Kitware/CMake/releases/download/v3.1.0/cmake-3.1.0-Darwin64.tar.gz
|
||||
tar xfz cmake-3.1.0-Darwin64.tar.gz
|
||||
CMAKE_BINARY=$(abspath cmake-3.1.0-Darwin64/CMake.app/Contents/bin/cmake) $(MAKE) check_cmake_flags_do
|
||||
CMAKE_BINARY=$(shell which cmake) $(MAKE) check_cmake_flags_do
|
||||
|
||||
|
||||
##########################################################################
|
||||
# ChangeLog
|
||||
##########################################################################
|
||||
|
||||
# Create a ChangeLog based on the git log using the GitHub Changelog Generator
|
||||
# (<https://github.com/github-changelog-generator/github-changelog-generator>).
|
||||
|
||||
# variable to control the diffs between the last released version and the current repository state
|
||||
NEXT_VERSION ?= "unreleased"
|
||||
|
||||
ChangeLog.md:
|
||||
github_changelog_generator -o ChangeLog.md --simple-list --release-url https://github.com/nlohmann/json/releases/tag/%s --future-release $(NEXT_VERSION)
|
||||
gsed -i 's|https://github.com/nlohmann/json/releases/tag/HEAD|https://github.com/nlohmann/json/tree/HEAD|' ChangeLog.md
|
||||
gsed -i '2i All notable changes to this project will be documented in this file. This project adheres to [Semantic Versioning](http://semver.org/).' ChangeLog.md
|
||||
$(SED) -i 's|https://github.com/nlohmann/json/releases/tag/HEAD|https://github.com/nlohmann/json/tree/HEAD|' ChangeLog.md
|
||||
$(SED) -i '2i All notable changes to this project will be documented in this file. This project adheres to [Semantic Versioning](http://semver.org/).' ChangeLog.md
|
||||
|
||||
|
||||
##########################################################################
|
||||
# release
|
||||
# Release files
|
||||
##########################################################################
|
||||
|
||||
# Create the files for a release and add signatures and hashes. We use `-X` to make the resulting ZIP file
|
||||
# reproducible, see <https://content.pivotal.io/blog/barriers-to-deterministic-reproducible-zip-files>.
|
||||
|
||||
release:
|
||||
rm -fr release_files
|
||||
mkdir release_files
|
||||
zip -9 -r include.zip include/*
|
||||
zip -9 --recurse-paths -X include.zip $(SRCS)
|
||||
gpg --armor --detach-sig include.zip
|
||||
mv include.zip include.zip.asc release_files
|
||||
gpg --armor --detach-sig single_include/nlohmann/json.hpp
|
||||
cp single_include/nlohmann/json.hpp release_files
|
||||
mv single_include/nlohmann/json.hpp.asc release_files
|
||||
gpg --armor --detach-sig $(AMALGAMATED_FILE)
|
||||
cp $(AMALGAMATED_FILE) release_files
|
||||
mv $(AMALGAMATED_FILE).asc release_files
|
||||
cd release_files ; shasum -a 256 json.hpp > hashes.txt
|
||||
cd release_files ; shasum -a 256 include.zip >> hashes.txt
|
||||
|
||||
|
||||
##########################################################################
|
||||
# Maintenance
|
||||
##########################################################################
|
||||
|
||||
# clean up
|
||||
clean:
|
||||
rm -fr json_unit json_benchmarks fuzz fuzz-testing *.dSYM test/*.dSYM oclint_report.html
|
||||
rm -fr benchmarks/files/numbers/*.json
|
||||
rm -fr cmake-3.1.0-Darwin64.tar.gz cmake-3.1.0-Darwin64
|
||||
rm -fr build_coverage build_benchmarks fuzz-testing clang_analyze_build pvs_studio_build infer_build clang_sanitize_build cmake_build
|
||||
$(MAKE) clean -Cdoc
|
||||
$(MAKE) clean -Ctest
|
||||
|
||||
##########################################################################
|
||||
# Thirdparty code
|
||||
##########################################################################
|
||||
|
||||
update_hedley:
|
||||
rm -f include/nlohmann/thirdparty/hedley/hedley.hpp include/nlohmann/thirdparty/hedley/hedley_undef.hpp
|
||||
curl https://raw.githubusercontent.com/nemequ/hedley/master/hedley.h -o include/nlohmann/thirdparty/hedley/hedley.hpp
|
||||
gsed -i 's/HEDLEY_/JSON_HEDLEY_/g' include/nlohmann/thirdparty/hedley/hedley.hpp
|
||||
grep "[[:blank:]]*#[[:blank:]]*undef" include/nlohmann/thirdparty/hedley/hedley.hpp | grep -v "__" | sort | uniq | gsed 's/ //g' | gsed 's/undef/undef /g' > include/nlohmann/thirdparty/hedley/hedley_undef.hpp
|
||||
$(MAKE) amalgamate
|
||||
|
|
README.md (135 lines changed)

@@ -2,6 +2,7 @@
|
||||
[![Build Status](https://travis-ci.org/nlohmann/json.svg?branch=master)](https://travis-ci.org/nlohmann/json)
|
||||
[![Build Status](https://ci.appveyor.com/api/projects/status/1acb366xfyg3qybk/branch/develop?svg=true)](https://ci.appveyor.com/project/nlohmann/json)
|
||||
[![Build Status](https://circleci.com/gh/nlohmann/json.svg?style=svg)](https://circleci.com/gh/nlohmann/json)
|
||||
[![Coverage Status](https://img.shields.io/coveralls/nlohmann/json.svg)](https://coveralls.io/r/nlohmann/json)
|
||||
[![Coverity Scan Build Status](https://scan.coverity.com/projects/5550/badge.svg)](https://scan.coverity.com/projects/nlohmann-json)
|
||||
[![Codacy Badge](https://api.codacy.com/project/badge/Grade/f3732b3327e34358a0e9d1fe9f661f08)](https://www.codacy.com/app/nlohmann/json?utm_source=github.com&utm_medium=referral&utm_content=nlohmann/json&utm_campaign=Badge_Grade)
|
||||
|
@@ -149,7 +150,7 @@ endif()
|
|||
|
||||
### Package Managers
|
||||
|
||||
:beer: If you are using OS X and [Homebrew](http://brew.sh), just type `brew tap nlohmann/json` and `brew install nlohmann_json` and you're set. If you want the bleeding edge rather than the latest release, use `brew install nlohmann_json --HEAD`.
|
||||
:beer: If you are using OS X and [Homebrew](http://brew.sh), just type `brew tap nlohmann/json` and `brew install nlohmann-json` and you're set. If you want the bleeding edge rather than the latest release, use `brew install nlohmann-json --HEAD`.
|
||||
|
||||
If you are using the [Meson Build System](http://mesonbuild.com), then you can get a wrap file by downloading it from [Meson WrapDB](https://wrapdb.mesonbuild.com/nlohmann_json), or simply use `meson wrap install nlohmann_json`.
|
||||
|
||||
|
@@ -171,6 +172,8 @@ If you are using [NuGet](https://www.nuget.org), you can use the package [nlohma
|
|||
|
||||
If you are using [conda](https://conda.io/), you can use the package [nlohmann_json](https://github.com/conda-forge/nlohmann_json-feedstock) from [conda-forge](https://conda-forge.org) executing `conda install -c conda-forge nlohmann_json`. Please file issues [here](https://github.com/conda-forge/nlohmann_json-feedstock/issues).
|
||||
|
||||
If you are using [MSYS2](http://www.msys2.org/), you can use the [mingw-w64-nlohmann_json](https://packages.msys2.org/base/mingw-w64-nlohmann_json) package, just type `pacman -S mingw-w64-i686-nlohmann_json` or `pacman -S mingw-w64-x86_64-nlohmann_json` for installation. Please file issues [here](https://github.com/msys2/MINGW-packages/issues/new?title=%5Bnlohmann_json%5D) if you experience problems with the packages.
|
||||
|
||||
## Examples
|
||||
|
||||
Beside the examples below, you may want to check the [documentation](https://nlohmann.github.io/json/) where each function contains a separate code example (e.g., check out [`emplace()`](https://nlohmann.github.io/json/classnlohmann_1_1basic__json_a5338e282d1d02bed389d852dd670d98d.html#a5338e282d1d02bed389d852dd670d98d)). All [example files](https://github.com/nlohmann/json/tree/develop/doc/examples) can be compiled and executed on their own (e.g., file [emplace.cpp](https://github.com/nlohmann/json/blob/develop/doc/examples/emplace.cpp)).
|
||||
|
@@ -242,7 +245,7 @@ json j2 = {
|
|||
};
|
||||
```
|
||||
|
||||
Note that in all these cases, you never need to "tell" the compiler which JSON value type you want to use. If you want to be explicit or express some edge cases, the functions [`json::array`](https://nlohmann.github.io/json/classnlohmann_1_1basic__json_aa80485befaffcadaa39965494e0b4d2e.html#aa80485befaffcadaa39965494e0b4d2e) and [`json::object`](https://nlohmann.github.io/json/classnlohmann_1_1basic__json_aa13f7c0615867542ce80337cbcf13ada.html#aa13f7c0615867542ce80337cbcf13ada) will help:
|
||||
Note that in all these cases, you never need to "tell" the compiler which JSON value type you want to use. If you want to be explicit or express some edge cases, the functions [`json::array()`](https://nlohmann.github.io/json/classnlohmann_1_1basic__json_a9ad7ec0bc1082ed09d10900fbb20a21f.html#a9ad7ec0bc1082ed09d10900fbb20a21f) and [`json::object()`](https://nlohmann.github.io/json/classnlohmann_1_1basic__json_aaf509a7c029100d292187068f61c99b8.html#aaf509a7c029100d292187068f61c99b8) will help:
|
||||
|
||||
```cpp
|
||||
// a way to express the empty array []
|
||||
|
@@ -277,7 +280,7 @@ auto j2 = R"(
|
|||
|
||||
Note that without appending the `_json` suffix, the passed string literal is not parsed, but just used as JSON string value. That is, `json j = "{ \"happy\": true, \"pi\": 3.141 }"` would just store the string `"{ "happy": true, "pi": 3.141 }"` rather than parsing the actual object.
|
||||
|
||||
The above example can also be expressed explicitly using [`json::parse()`](https://nlohmann.github.io/json/classnlohmann_1_1basic__json_a5a0339361f3282cb8fd2f9ede6e17d72.html#a5a0339361f3282cb8fd2f9ede6e17d72):
|
||||
The above example can also be expressed explicitly using [`json::parse()`](https://nlohmann.github.io/json/classnlohmann_1_1basic__json_afd4ef1ac8ad50a5894a9afebca69140a.html#afd4ef1ac8ad50a5894a9afebca69140a):
|
||||
|
||||
```cpp
|
||||
// parse explicitly
|
||||
|
@@ -320,7 +323,7 @@ std::cout << cpp_string << " == " << cpp_string2 << " == " << j_string.get<std::
|
|||
std::cout << j_string << " == " << serialized_string << std::endl;
|
||||
```
|
||||
|
||||
[`.dump()`](https://nlohmann.github.io/json/classnlohmann_1_1basic__json_a50ec80b02d0f3f51130d4abb5d1cfdc5.html#a50ec80b02d0f3f51130d4abb5d1cfdc5) always returns the serialized value, and [`.get<std::string>()`](https://nlohmann.github.io/json/classnlohmann_1_1basic__json_a16f9445f7629f634221a42b967cdcd43.html#a16f9445f7629f634221a42b967cdcd43) returns the originally stored string value.
|
||||
[`.dump()`](https://nlohmann.github.io/json/classnlohmann_1_1basic__json_a50ec80b02d0f3f51130d4abb5d1cfdc5.html#a50ec80b02d0f3f51130d4abb5d1cfdc5) always returns the serialized value, and [`.get<std::string>()`](https://nlohmann.github.io/json/classnlohmann_1_1basic__json_aa6602bb24022183ab989439e19345d08.html#aa6602bb24022183ab989439e19345d08) returns the originally stored string value.
|
||||
|
||||
Note the library only supports UTF-8. When you store strings with different encodings in the library, calling [`dump()`](https://nlohmann.github.io/json/classnlohmann_1_1basic__json_a50ec80b02d0f3f51130d4abb5d1cfdc5.html#a50ec80b02d0f3f51130d4abb5d1cfdc5) may throw an exception unless `json::error_handler_t::replace` or `json::error_handler_t::ignore` are used as error handlers.
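As a hedged illustration of the error handlers mentioned above (a sketch assuming the four-parameter `dump(indent, indent_char, ensure_ascii, error_handler)` overload; the sample string is made up):

```cpp
#include <iostream>
#include <nlohmann/json.hpp>

using json = nlohmann::json;

int main()
{
    // a string value containing a byte that is not valid UTF-8
    json j = "this is not UTF-8: \xFF";

    // with the default (strict) handler, dump() would throw on the invalid byte;
    // json::error_handler_t::replace substitutes U+FFFD for it instead
    std::cout << j.dump(-1, ' ', false, json::error_handler_t::replace) << '\n';

    // json::error_handler_t::ignore silently drops the invalid byte
    std::cout << j.dump(-1, ' ', false, json::error_handler_t::ignore) << '\n';
}
```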
|
||||
|
||||
|
@@ -1021,35 +1024,39 @@ Please note:
|
|||
|
||||
- Unsupported versions of GCC and Clang are rejected by `#error` directives. This can be switched off by defining `JSON_SKIP_UNSUPPORTED_COMPILER_CHECK`. Note that you can expect no support in this case.
|
||||
|
||||
The following compilers are currently used in continuous integration at [Travis](https://travis-ci.org/nlohmann/json) and [AppVeyor](https://ci.appveyor.com/project/nlohmann/json):
|
||||
The following compilers are currently used in continuous integration at [Travis](https://travis-ci.org/nlohmann/json), [AppVeyor](https://ci.appveyor.com/project/nlohmann/json), and [Doozer](https://doozer.io):
|
||||
|
||||
| Compiler | Operating System | Version String |
|
||||
|-----------------|------------------------------|----------------|
|
||||
| GCC 4.8.5 | Ubuntu 14.04.5 LTS | g++-4.8 (Ubuntu 4.8.5-2ubuntu1~14.04.2) 4.8.5 |
|
||||
| GCC 4.9.4 | Ubuntu 14.04.1 LTS | g++-4.9 (Ubuntu 4.9.4-2ubuntu1~14.04.1) 4.9.4 |
|
||||
| GCC 5.5.0 | Ubuntu 14.04.1 LTS | g++-5 (Ubuntu 5.5.0-12ubuntu1~14.04) 5.5.0 20171010 |
|
||||
| GCC 6.4.0 | Ubuntu 14.04.1 LTS | g++-6 (Ubuntu 6.4.0-17ubuntu1~14.04) 6.4.0 20180424 |
|
||||
| GCC 7.3.0 | Ubuntu 14.04.1 LTS | g++-7 (Ubuntu 7.3.0-21ubuntu1~14.04) 7.3.0 |
|
||||
| GCC 7.3.0 | Windows Server 2012 R2 (x64) | g++ (x86_64-posix-seh-rev0, Built by MinGW-W64 project) 7.3.0 |
|
||||
| GCC 8.1.0 | Ubuntu 14.04.1 LTS | g++-8 (Ubuntu 8.1.0-5ubuntu1~14.04) 8.1.0 |
|
||||
| Clang 3.5.0 | Ubuntu 14.04.1 LTS | clang version 3.5.0-4ubuntu2~trusty2 (tags/RELEASE_350/final) (based on LLVM 3.5.0) |
|
||||
| Clang 3.6.2 | Ubuntu 14.04.1 LTS | clang version 3.6.2-svn240577-1~exp1 (branches/release_36) (based on LLVM 3.6.2) |
|
||||
| Clang 3.7.1 | Ubuntu 14.04.1 LTS | clang version 3.7.1-svn253571-1~exp1 (branches/release_37) (based on LLVM 3.7.1) |
|
||||
| Clang 3.8.0 | Ubuntu 14.04.1 LTS | clang version 3.8.0-2ubuntu3~trusty5 (tags/RELEASE_380/final) |
|
||||
| Clang 3.9.1 | Ubuntu 14.04.1 LTS | clang version 3.9.1-4ubuntu3~14.04.3 (tags/RELEASE_391/rc2) |
|
||||
| Clang 4.0.1 | Ubuntu 14.04.1 LTS | clang version 4.0.1-svn305264-1~exp1 (branches/release_40) |
|
||||
| Clang 5.0.2 | Ubuntu 14.04.1 LTS | clang version 5.0.2-svn328729-1~exp1~20180509123505.100 (branches/release_50) |
|
||||
| Clang 6.0.1 | Ubuntu 14.04.1 LTS | clang version 6.0.1-svn334776-1~exp1~20180726133705.85 (branches/release_60) |
|
||||
| Clang 7.0.1 | Ubuntu 14.04.1 LTS | clang version 7.0.1-svn348686-1~exp1~20181213084532.54 (branches/release_70) |
|
||||
| Clang Xcode 8.3 | OSX 10.11.6 | Apple LLVM version 8.1.0 (clang-802.0.38) |
|
||||
| Clang Xcode 9.0 | OSX 10.12.6 | Apple LLVM version 9.0.0 (clang-900.0.37) |
|
||||
| Clang Xcode 9.1 | OSX 10.12.6 | Apple LLVM version 9.0.0 (clang-900.0.38) |
|
||||
| Clang Xcode 9.2 | OSX 10.13.3 | Apple LLVM version 9.1.0 (clang-902.0.39.1) |
|
||||
| Clang Xcode 9.3 | OSX 10.13.3 | Apple LLVM version 9.1.0 (clang-902.0.39.2) |
|
||||
| Clang Xcode 10.0 | OSX 10.13.3 | Apple LLVM version 10.0.0 (clang-1000.11.45.2) |
|
||||
| Clang Xcode 10.1 | OSX 10.13.3 | Apple LLVM version 10.0.0 (clang-1000.11.45.5) |
|
||||
| Compiler | Operating System | Version String |
|
||||
|-----------------------|------------------------------|----------------|
|
||||
| GCC 4.8.5 | Ubuntu 14.04.5 LTS | g++-4.8 (Ubuntu 4.8.5-2ubuntu1~14.04.2) 4.8.5 |
|
||||
| GCC 4.8.5 | CentOS Release-7-6.1810.2.el7.centos.x86_64 | g++ (GCC) 4.8.5 20150623 (Red Hat 4.8.5-36) |
|
||||
| GCC 4.9.2 (armv7l) | Raspbian GNU/Linux 8 (jessie) | g++ (Raspbian 4.9.2-10+deb8u2) 4.9.2 |
|
||||
| GCC 4.9.4 | Ubuntu 14.04.1 LTS | g++-4.9 (Ubuntu 4.9.4-2ubuntu1~14.04.1) 4.9.4 |
|
||||
| GCC 5.3.1 (armv7l) | Ubuntu 16.04 LTS | g++ (Ubuntu/Linaro 5.3.1-14ubuntu2) 5.3.1 20160413 |
|
||||
| GCC 5.5.0 | Ubuntu 14.04.1 LTS | g++-5 (Ubuntu 5.5.0-12ubuntu1~14.04) 5.5.0 20171010 |
|
||||
| GCC 6.3.1 | Fedora release 24 (Twenty Four) | g++ (GCC) 6.3.1 20161221 (Red Hat 6.3.1-1) |
|
||||
| GCC 6.4.0 | Ubuntu 14.04.1 LTS | g++-6 (Ubuntu 6.4.0-17ubuntu1~14.04) 6.4.0 20180424 |
|
||||
| GCC 7.3.0 | Ubuntu 14.04.1 LTS | g++-7 (Ubuntu 7.3.0-21ubuntu1~14.04) 7.3.0 |
|
||||
| GCC 7.3.0 | Windows Server 2012 R2 (x64) | g++ (x86_64-posix-seh-rev0, Built by MinGW-W64 project) 7.3.0 |
|
||||
| GCC 8.1.0 | Ubuntu 14.04.1 LTS | g++-8 (Ubuntu 8.1.0-5ubuntu1~14.04) 8.1.0 |
|
||||
| Clang 3.5.0 | Ubuntu 14.04.1 LTS | clang version 3.5.0-4ubuntu2~trusty2 (tags/RELEASE_350/final) (based on LLVM 3.5.0) |
|
||||
| Clang 3.6.2 | Ubuntu 14.04.1 LTS | clang version 3.6.2-svn240577-1~exp1 (branches/release_36) (based on LLVM 3.6.2) |
|
||||
| Clang 3.7.1 | Ubuntu 14.04.1 LTS | clang version 3.7.1-svn253571-1~exp1 (branches/release_37) (based on LLVM 3.7.1) |
|
||||
| Clang 3.8.0 | Ubuntu 14.04.1 LTS | clang version 3.8.0-2ubuntu3~trusty5 (tags/RELEASE_380/final) |
|
||||
| Clang 3.9.1 | Ubuntu 14.04.1 LTS | clang version 3.9.1-4ubuntu3~14.04.3 (tags/RELEASE_391/rc2) |
|
||||
| Clang 4.0.1 | Ubuntu 14.04.1 LTS | clang version 4.0.1-svn305264-1~exp1 (branches/release_40) |
|
||||
| Clang 5.0.2 | Ubuntu 14.04.1 LTS | clang version 5.0.2-svn328729-1~exp1~20180509123505.100 (branches/release_50) |
|
||||
| Clang 6.0.1 | Ubuntu 14.04.1 LTS | clang version 6.0.1-svn334776-1~exp1~20180726133705.85 (branches/release_60) |
|
||||
| Clang 7.0.1 | Ubuntu 14.04.1 LTS | clang version 7.0.1-svn348686-1~exp1~20181213084532.54 (branches/release_70) |
|
||||
| Clang Xcode 8.3 | OSX 10.11.6 | Apple LLVM version 8.1.0 (clang-802.0.38) |
|
||||
| Clang Xcode 9.0 | OSX 10.12.6 | Apple LLVM version 9.0.0 (clang-900.0.37) |
|
||||
| Clang Xcode 9.1 | OSX 10.12.6 | Apple LLVM version 9.0.0 (clang-900.0.38) |
|
||||
| Clang Xcode 9.2 | OSX 10.13.3 | Apple LLVM version 9.1.0 (clang-902.0.39.1) |
|
||||
| Clang Xcode 9.3 | OSX 10.13.3 | Apple LLVM version 9.1.0 (clang-902.0.39.2) |
|
||||
| Clang Xcode 10.0 | OSX 10.13.3 | Apple LLVM version 10.0.0 (clang-1000.11.45.2) |
|
||||
| Clang Xcode 10.1 | OSX 10.13.3 | Apple LLVM version 10.0.0 (clang-1000.11.45.5) |
|
||||
| Visual Studio 14 2015 | Windows Server 2012 R2 (x64) | Microsoft (R) Build Engine version 14.0.25420.1, MSVC 19.0.24215.1 |
|
||||
| Visual Studio 2017 | Windows Server 2016 | Microsoft (R) Build Engine version 15.7.180.61344, MSVC 19.14.26433.0 |
|
||||
|
||||
|
||||
## License
|
||||
|
||||
|
@ -1071,9 +1078,11 @@ The class contains the UTF-8 Decoder from Bjoern Hoehrmann which is licensed und
|
|||
|
||||
The class contains a slightly modified version of the Grisu2 algorithm from Florian Loitsch which is licensed under the [MIT License](http://opensource.org/licenses/MIT) (see above). Copyright © 2009 [Florian Loitsch](http://florian.loitsch.com/)
|
||||
|
||||
The class contains a copy of [Hedley](https://nemequ.github.io/hedley/) from Evan Nemerson which is licensed as [CC0-1.0](http://creativecommons.org/publicdomain/zero/1.0/).
|
||||
|
||||
## Contact
|
||||
|
||||
If you have questions regarding the library, I would like to invite you to [open an issue at GitHub](https://github.com/nlohmann/json/issues/new). Please describe your request, problem, or question as detailed as possible, and also mention the version of the library you are using as well as the version of your compiler and operating system. Opening an issue at GitHub allows other users and contributors to this library to collaborate. For instance, I have little experience with MSVC, and most issues in this regard have been solved by a growing community. If you have a look at the [closed issues](https://github.com/nlohmann/json/issues?q=is%3Aissue+is%3Aclosed), you will see that we react quite timely in most cases.
|
||||
If you have questions regarding the library, I would like to invite you to [open an issue at GitHub](https://github.com/nlohmann/json/issues/new/choose). Please describe your request, problem, or question as detailed as possible, and also mention the version of the library you are using as well as the version of your compiler and operating system. Opening an issue at GitHub allows other users and contributors to this library to collaborate. For instance, I have little experience with MSVC, and most issues in this regard have been solved by a growing community. If you have a look at the [closed issues](https://github.com/nlohmann/json/issues?q=is%3Aissue+is%3Aclosed), you will see that we react quite timely in most cases.
|
||||
|
||||
If your request contains confidential information, please [send me an email](mailto:mail@nlohmann.me) instead. For encrypted messages, please use [this key](https://keybase.io/nlohmann/pgp_keys.asc).
|
||||
|
||||
|
@ -1254,6 +1263,12 @@ I deeply appreciate the help of the following people.
|
|||
- [Hani](https://github.com/hnkb) documented how to install the library with NuGet.
|
||||
- [Mark Beckwith](https://github.com/wythe) fixed a typo.
|
||||
- [yann-morin-1998](https://github.com/yann-morin-1998) helped reduce the CMake requirement to version 3.1.
|
||||
- [Konstantin Podsvirov](https://github.com/podsvirov) maintains a package for the MSYS2 software distro.
|
||||
- [remyabel](https://github.com/remyabel) added GNUInstallDirs to the CMake files.
|
||||
- [Taylor Howard](https://github.com/taylorhoward92) fixed a unit test.
|
||||
- [Gabe Ron](https://github.com/Macr0Nerd) implemented the `to_string` method.
|
||||
- [Watal M. Iwasaki](https://github.com/heavywatal) fixed a Clang warning.
|
||||
- [Viktor Kirilov](https://github.com/onqtam) switched the unit tests from [Catch](https://github.com/philsquared/Catch) to [doctest](https://github.com/onqtam/doctest).
|
||||
|
||||
Thanks a lot for helping out! Please [let me know](mailto:mail@nlohmann.me) if I forgot someone.
|
||||
|
||||
|
@ -1265,18 +1280,23 @@ The library itself consists of a single header file licensed under the MIT licen
|
|||
- [**amalgamate.py - Amalgamate C source and header files**](https://github.com/edlund/amalgamate) to create a single header file
|
||||
- [**American fuzzy lop**](http://lcamtuf.coredump.cx/afl/) for fuzz testing
|
||||
- [**AppVeyor**](https://www.appveyor.com) for [continuous integration](https://ci.appveyor.com/project/nlohmann/json) on Windows
|
||||
- [**Artistic Style**](http://astyle.sourceforge.net) for automatic source code identation
|
||||
- [**Catch**](https://github.com/philsquared/Catch) for the unit tests
|
||||
- [**Artistic Style**](http://astyle.sourceforge.net) for automatic source code indentation
|
||||
- [**CircleCI**](http://circleci.com) for [continuous integration](https://circleci.com/gh/nlohmann/json).
|
||||
- [**Clang**](http://clang.llvm.org) for compilation with code sanitizers
|
||||
- [**CMake**](https://cmake.org) for build automation
|
||||
- [**Codacy**](https://www.codacy.com) for further [code analysis](https://www.codacy.com/app/nlohmann/json)
|
||||
- [**Coveralls**](https://coveralls.io) to measure [code coverage](https://coveralls.io/github/nlohmann/json)
|
||||
- [**Coverity Scan**](https://scan.coverity.com) for [static analysis](https://scan.coverity.com/projects/nlohmann-json)
|
||||
- [**cppcheck**](http://cppcheck.sourceforge.net) for static analysis
|
||||
- [**doctest**](https://github.com/onqtam/doctest) for the unit tests
|
||||
- [**Doozer**](https://doozer.io) for [continuous integration](https://doozer.io/nlohmann/json) on Linux (CentOS, Raspbian, Fedora)
|
||||
- [**Doxygen**](http://www.stack.nl/~dimitri/doxygen/) to generate [documentation](https://nlohmann.github.io/json/)
|
||||
- [**fastcov**](https://github.com/RPGillespie6/fastcov) to process coverage information
|
||||
- [**git-update-ghpages**](https://github.com/rstacruz/git-update-ghpages) to upload the documentation to gh-pages
|
||||
- [**GitHub Changelog Generator**](https://github.com/skywinder/github-changelog-generator) to generate the [ChangeLog](https://github.com/nlohmann/json/blob/develop/ChangeLog.md)
|
||||
- [**Google Benchmark**](https://github.com/google/benchmark) to implement the benchmarks
|
||||
- [**Hedley**](https://nemequ.github.io/hedley/) to avoid re-inventing several compiler-agnostic feature macros
|
||||
- [**lcov**](http://ltp.sourceforge.net/coverage/lcov.php) to process coverage information and create a HTML view
|
||||
- [**libFuzzer**](http://llvm.org/docs/LibFuzzer.html) to implement fuzz testing for OSS-Fuzz
|
||||
- [**OSS-Fuzz**](https://github.com/google/oss-fuzz) for continuous fuzz testing of the library ([project repository](https://github.com/google/oss-fuzz/tree/master/projects/json))
|
||||
- [**Probot**](https://probot.github.io) for automating maintainer tasks such as closing stale issues, requesting missing information, or detecting toxic comments.
|
||||
|
@ -1293,18 +1313,47 @@ The library is currently used in Apple macOS Sierra and iOS 10. I am not sure wh
|
|||
|
||||
## Notes
|
||||
|
||||
### Character encoding
|
||||
|
||||
The library supports **Unicode input** as follows:
|
||||
|
||||
- Only **UTF-8** encoded input is supported, which is the default encoding for JSON according to [RFC 8259](https://tools.ietf.org/html/rfc8259.html#section-8.1).
|
||||
- `std::u16string` and `std::u32string` can be parsed, assuming UTF-16 and UTF-32 encoding, respectively (see the sketch after this list). These encodings are not supported when reading from files or other input containers.
|
||||
- Other encodings such as Latin-1 or ISO 8859-1 are **not** supported and will yield parse or serialization errors.
|
||||
- [Unicode noncharacters](http://www.unicode.org/faq/private_use.html#nonchar1) will not be replaced by the library.
|
||||
- Invalid surrogates (e.g., incomplete pairs such as `\uDEAD`) will yield parse errors.
|
||||
- The strings stored in the library are UTF-8 encoded. When using the default string type (`std::string`), note that its length/size functions return the number of stored bytes rather than the number of characters or glyphs.
|
||||
- When you store strings with different encodings in the library, calling [`dump()`](https://nlohmann.github.io/json/classnlohmann_1_1basic__json_a50ec80b02d0f3f51130d4abb5d1cfdc5.html#a50ec80b02d0f3f51130d4abb5d1cfdc5) may throw an exception unless `json::error_handler_t::replace` or `json::error_handler_t::ignore` are used as error handlers.
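The UTF-16/UTF-32 bullet above can be exercised with a minimal sketch like the following; it assumes only the documented `parse()` overloads for `std::u16string` and `std::u32string`:

```cpp
#include <iostream>
#include <string>
#include <nlohmann/json.hpp>

using json = nlohmann::json;

int main()
{
    // UTF-16 and UTF-32 encoded strings are transcoded to UTF-8 while parsing
    std::u16string s16 = u"{\"key\": \"value\"}";
    std::u32string s32 = U"[1, 2, 3]";

    json j16 = json::parse(s16);
    json j32 = json::parse(s32);

    std::cout << j16 << ' ' << j32 << '\n';   // stored strings are UTF-8
}
```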
|
||||
|
||||
### Comments in JSON
|
||||
|
||||
This library does not support comments, for three reasons:
|
||||
|
||||
1. Comments are not part of the [JSON specification](https://tools.ietf.org/html/rfc8259). You may argue that `//` or `/* */` are allowed in JavaScript, but JSON is not JavaScript.
|
||||
2. This was not an oversight: Douglas Crockford [wrote on this](https://plus.google.com/118095276221607585885/posts/RK8qyGVaGSr) in May 2012:
|
||||
|
||||
> I removed comments from JSON because I saw people were using them to hold parsing directives, a practice which would have destroyed interoperability. I know that the lack of comments makes some people sad, but it shouldn't.
|
||||
|
||||
> Suppose you are using JSON to keep configuration files, which you would like to annotate. Go ahead and insert all the comments you like. Then pipe it through JSMin before handing it to your JSON parser.
|
||||
|
||||
3. It is dangerous for interoperability if some libraries add comment support while others do not. Please check [The Harmful Consequences of the Robustness Principle](https://tools.ietf.org/html/draft-iab-protocol-maintenance-01) on this.
|
||||
|
||||
This library will not support comments in the future. If you wish to use comments, I see three options:
|
||||
|
||||
1. Strip comments before using this library.
|
||||
2. Use a different JSON library with comment support.
|
||||
3. Use a format that natively supports comments (e.g., YAML or JSON5).
|
||||
|
||||
### Order of object keys
|
||||
|
||||
By default, the library does not preserve the **insertion order of object elements**. This is standards-compliant, as the [JSON standard](https://tools.ietf.org/html/rfc8259.html) defines objects as "an unordered collection of zero or more name/value pairs". If you do want to preserve the insertion order, you can specialize the object type with containers like [`tsl::ordered_map`](https://github.com/Tessil/ordered-map) ([integration](https://github.com/nlohmann/json/issues/546#issuecomment-304447518)) or [`nlohmann::fifo_map`](https://github.com/nlohmann/fifo_map) ([integration](https://github.com/nlohmann/json/issues/485#issuecomment-333652309)).
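A minimal sketch of the `fifo_map` integration linked above might look as follows; the include path `fifo_map.hpp` and the workaround alias are taken from that discussion and are assumptions, not part of this library:

```cpp
#include <iostream>
#include <nlohmann/json.hpp>
#include "fifo_map.hpp"   // assumption: single header from the nlohmann/fifo_map repository

// fifo_map brings its own comparator, so the 'less' template parameter is ignored
template<class K, class V, class IgnoredLess, class A>
using fifo_map_workaround = nlohmann::fifo_map<K, V, nlohmann::fifo_map_compare<K>, A>;

// a basic_json specialization whose objects remember insertion order
using ordered_json = nlohmann::basic_json<fifo_map_workaround>;

int main()
{
    ordered_json j;
    j["zebra"] = 1;
    j["apple"] = 2;

    std::cout << j.dump() << '\n';   // {"zebra":1,"apple":2}
}
```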
|
||||
|
||||
### Further notes
|
||||
|
||||
- The code contains numerous debug **assertions** which can be switched off by defining the preprocessor macro `NDEBUG`; see the [documentation of `assert`](https://en.cppreference.com/w/cpp/error/assert). In particular, note that [`operator[]`](https://nlohmann.github.io/json/classnlohmann_1_1basic__json_a233b02b0839ef798942dd46157cc0fe6.html#a233b02b0839ef798942dd46157cc0fe6) implements **unchecked access** for const objects: if the given key is not present, the behavior is undefined (think of a dereferenced null pointer) and yields an [assertion failure](https://github.com/nlohmann/json/issues/289) if assertions are switched on. If you are not sure whether an element in an object exists, use checked access with the [`at()` function](https://nlohmann.github.io/json/classnlohmann_1_1basic__json_a73ae333487310e3302135189ce8ff5d8.html#a73ae333487310e3302135189ce8ff5d8), as shown in the sketch after this list.
|
||||
- As the exact type of a number is not defined in the [JSON specification](https://tools.ietf.org/html/rfc7159.html), this library tries to choose the best fitting C++ number type automatically. As a result, the type `double` may be used to store numbers which may yield [**floating-point exceptions**](https://github.com/nlohmann/json/issues/181) in certain rare situations if floating-point exceptions have been unmasked in the calling code. These exceptions are not caused by the library and need to be fixed in the calling code, such as by re-masking the exceptions prior to calling library functions.
|
||||
- The library supports **Unicode input** as follows:
|
||||
- Only **UTF-8** encoded input is supported which is the default encoding for JSON according to [RFC 7159](https://tools.ietf.org/html/rfc7159.html#section-8.1).
|
||||
- Other encodings such as Latin-1, UTF-16, or UTF-32 are not supported and will yield parse or serialization errors.
|
||||
- [Unicode noncharacters](http://www.unicode.org/faq/private_use.html#nonchar1) will not be replaced by the library.
|
||||
- Invalid surrogates (e.g., incomplete pairs such as `\uDEAD`) will yield parse errors.
|
||||
- The strings stored in the library are UTF-8 encoded. When using the default string type (`std::string`), note that its length/size functions return the number of stored bytes rather than the number of characters or glyphs.
|
||||
- As the exact type of a number is not defined in the [JSON specification](https://tools.ietf.org/html/rfc8259.html), this library tries to choose the best fitting C++ number type automatically. As a result, the type `double` may be used to store numbers which may yield [**floating-point exceptions**](https://github.com/nlohmann/json/issues/181) in certain rare situations if floating-point exceptions have been unmasked in the calling code. These exceptions are not caused by the library and need to be fixed in the calling code, such as by re-masking the exceptions prior to calling library functions.
|
||||
- The code can be compiled without C++ **runtime type identification** features; that is, you can use the `-fno-rtti` compiler flag.
|
||||
- **Exceptions** are used widely within the library. They can, however, be switched off either by using the compiler flag `-fno-exceptions` or by defining the symbol `JSON_NOEXCEPTION`. In this case, exceptions are replaced by an `abort()` call.
|
||||
- By default, the library does not preserve the **insertion order of object elements**. This is standards-compliant, as the [JSON standard](https://tools.ietf.org/html/rfc7159.html) defines objects as "an unordered collection of zero or more name/value pairs". If you do want to preserve the insertion order, you can specialize the object type with containers like [`tsl::ordered_map`](https://github.com/Tessil/ordered-map) ([integration](https://github.com/nlohmann/json/issues/546#issuecomment-304447518)) or [`nlohmann::fifo_map`](https://github.com/nlohmann/fifo_map) ([integration](https://github.com/nlohmann/json/issues/485#issuecomment-333652309)).
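To make the checked/unchecked distinction from the first bullet concrete, here is a minimal sketch; the `out_of_range.403` code follows the library's exception numbering for missing keys:

```cpp
#include <iostream>
#include <nlohmann/json.hpp>

using json = nlohmann::json;

int main()
{
    const json j = {{"name", "Niels"}};

    // checked access: a missing key throws out_of_range.403
    try
    {
        std::cout << j.at("age") << '\n';
    }
    catch (json::out_of_range& e)
    {
        std::cout << e.what() << '\n';
    }

    // unchecked access on a const object with a missing key is undefined behavior
    // and triggers an assertion failure unless NDEBUG is defined:
    // std::cout << j["age"] << '\n';
}
```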
|
||||
|
||||
|
||||
## Execute unit tests
|
||||
|
||||
|
|
51
appveyor.yml
51
appveyor.yml
|
@ -3,6 +3,21 @@ version: '{build}'
|
|||
environment:
|
||||
matrix:
|
||||
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
|
||||
configuration: Debug
|
||||
platform: x86
|
||||
CXX_FLAGS: ""
|
||||
LINKER_FLAGS: ""
|
||||
GENERATOR: Visual Studio 14 2015
|
||||
|
||||
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
|
||||
configuration: Debug
|
||||
platform: x86
|
||||
CXX_FLAGS: ""
|
||||
LINKER_FLAGS: ""
|
||||
GENERATOR: Visual Studio 15 2017
|
||||
|
||||
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
|
||||
configuration: Debug
|
||||
COMPILER: mingw
|
||||
platform: x86
|
||||
CXX_FLAGS: ""
|
||||
|
@ -10,36 +25,58 @@ environment:
|
|||
GENERATOR: Ninja
|
||||
|
||||
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
|
||||
configuration: Release
|
||||
COMPILER: mingw
|
||||
platform: x86
|
||||
CXX_FLAGS: ""
|
||||
LINKER_FLAGS: ""
|
||||
GENERATOR: Ninja
|
||||
|
||||
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
|
||||
configuration: Release
|
||||
platform: x86
|
||||
CXX_FLAGS: ""
|
||||
LINKER_FLAGS: ""
|
||||
GENERATOR: Visual Studio 14 2015
|
||||
|
||||
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
|
||||
configuration: Release
|
||||
platform: x86
|
||||
name: with_win_header
|
||||
CXX_FLAGS: ""
|
||||
LINKER_FLAGS: ""
|
||||
GENERATOR: Visual Studio 14 2015
|
||||
|
||||
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
|
||||
configuration: Release
|
||||
platform: x86
|
||||
CXX_FLAGS: ""
|
||||
LINKER_FLAGS: ""
|
||||
GENERATOR: Visual Studio 15 2017
|
||||
|
||||
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
|
||||
configuration: Release
|
||||
platform: x86
|
||||
CXX_FLAGS: "/permissive- /std:c++latest /utf-8"
|
||||
LINKER_FLAGS: ""
|
||||
GENERATOR: Visual Studio 15 2017
|
||||
|
||||
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
|
||||
configuration: Release
|
||||
platform: x64
|
||||
CXX_FLAGS: ""
|
||||
LINKER_FLAGS: ""
|
||||
GENERATOR: Visual Studio 14 2015 Win64
|
||||
|
||||
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
|
||||
configuration: Release
|
||||
platform: x64
|
||||
CXX_FLAGS: ""
|
||||
LINKER_FLAGS: ""
|
||||
GENERATOR: Visual Studio 15 2017 Win64
|
||||
|
||||
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
|
||||
configuration: Release
|
||||
platform: x64
|
||||
CXX_FLAGS: "/permissive- /std:c++latest /utf-8 /F4000000"
|
||||
LINKER_FLAGS: "/STACK:4000000"
|
||||
|
@ -57,10 +94,18 @@ install:
|
|||
- if "%COMPILER%"=="mingw" g++ --version
|
||||
|
||||
before_build:
|
||||
- cmake . -G "%GENERATOR%" -DCMAKE_CXX_FLAGS="%CXX_FLAGS%" -DCMAKE_EXE_LINKER_FLAGS="%LINKER_FLAGS%" -DCMAKE_IGNORE_PATH="C:/Program Files/Git/usr/bin"
|
||||
# for with_win_header build, inject the inclusion of Windows.h to the single-header library
|
||||
- ps: if ($env:name -Eq "with_win_header") { $header_path = "single_include\nlohmann\json.hpp" }
|
||||
- ps: if ($env:name -Eq "with_win_header") { "#include <Windows.h>`n" + (Get-Content $header_path | Out-String) | Set-Content $header_path }
|
||||
- cmake . -G "%GENERATOR%" -DCMAKE_BUILD_TYPE="%configuration%" -DCMAKE_CXX_FLAGS="%CXX_FLAGS%" -DCMAKE_EXE_LINKER_FLAGS="%LINKER_FLAGS%" -DCMAKE_IGNORE_PATH="C:/Program Files/Git/usr/bin"
|
||||
|
||||
build_script:
|
||||
- cmake --build . --config Release
|
||||
- cmake --build . --config "%configuration%"
|
||||
|
||||
test_script:
|
||||
- ctest -C Release -V -j
|
||||
- if "%configuration%"=="Release" ctest -C "%configuration%" -V -j
|
||||
# On Debug builds, skip test-unicode_all
|
||||
# as it is extremely slow to run and causes
|
||||
# occasional timeouts on AppVeyor.
|
||||
# More info: https://github.com/nlohmann/json/pull/1570
|
||||
- if "%configuration%"=="Debug" ctest --exclude-regex "test-unicode_all" -C "%configuration%" -V -j
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
#---------------------------------------------------------------------------
|
||||
DOXYFILE_ENCODING = UTF-8
|
||||
PROJECT_NAME = "JSON for Modern C++"
|
||||
PROJECT_NUMBER = 3.6.1
|
||||
PROJECT_NUMBER = 3.7.0
|
||||
PROJECT_BRIEF =
|
||||
PROJECT_LOGO =
|
||||
OUTPUT_DIRECTORY = .
|
||||
|
|
|
@ -61,6 +61,8 @@ doxygen: create_output create_links
|
|||
$(SED) -i 's@template<template< typename U, typename V, typename... Args > class ObjectType = std::map, template< typename U, typename... Args > class ArrayType = std::vector, class StringType = std::string, class BooleanType = bool, class NumberIntegerType = std::int64_t, class NumberUnsignedType = std::uint64_t, class NumberFloatType = double, template< typename U > class AllocatorType = std::allocator, template< typename T, typename SFINAE=void > class JSONSerializer = adl_serializer>@@g' html/*.html
|
||||
$(SED) -i 's@< ObjectType, ArrayType, StringType, BooleanType, NumberIntegerType, NumberUnsignedType, NumberFloatType, AllocatorType, JSONSerializer >@@g' html/*.html
|
||||
$(SED) -i 's@< ObjectType, ArrayType, StringType, BooleanType, NumberIntegerType, NumberUnsignedType, NumberFloatType, AllocatorType, JSONSerializer >@@g' html/*.html
|
||||
$(SED) -i 's@JSON_HEDLEY_RETURNS_NON_NULL@@g' html/*.html
|
||||
$(SED) -i 's@JSON_HEDLEY_WARN_UNUSED_RESULT@@g' html/*.html
|
||||
|
||||
upload: clean doxygen check_output
|
||||
scripts/git-update-ghpages nlohmann/json html
|
||||
|
|
|
@ -1 +1 @@
|
|||
<a target="_blank" href="https://wandbox.org/permlink/uAyi8CvcGwzGQ406"><b>online</b></a>
|
||||
<a target="_blank" href="https://wandbox.org/permlink/tD2mLxlcQZLrIvFW"><b>online</b></a>
|
45
doc/examples/contains_json_pointer.cpp
Normal file
45
doc/examples/contains_json_pointer.cpp
Normal file
|
@ -0,0 +1,45 @@
|
|||
#include <iostream>
|
||||
#include <nlohmann/json.hpp>
|
||||
|
||||
using json = nlohmann::json;
|
||||
|
||||
int main()
|
||||
{
|
||||
// create a JSON value
|
||||
json j =
|
||||
{
|
||||
{"number", 1}, {"string", "foo"}, {"array", {1, 2}}
|
||||
};
|
||||
|
||||
std::cout << std::boolalpha
|
||||
<< j.contains("/number"_json_pointer) << '\n'
|
||||
<< j.contains("/string"_json_pointer) << '\n'
|
||||
<< j.contains("/string"_json_pointer) << '\n'
|
||||
<< j.contains("/array"_json_pointer) << '\n'
|
||||
<< j.contains("/array/1"_json_pointer) << '\n'
|
||||
<< j.contains("/array/-"_json_pointer) << '\n'
|
||||
<< j.contains("/array/4"_json_pointer) << '\n'
|
||||
<< j.contains("/baz"_json_pointer) << std::endl;
|
||||
|
||||
// out_of_range.106
|
||||
try
|
||||
{
|
||||
// try to use an array index with leading '0'
|
||||
j.contains("/array/01"_json_pointer);
|
||||
}
|
||||
catch (json::parse_error& e)
|
||||
{
|
||||
std::cout << e.what() << '\n';
|
||||
}
|
||||
|
||||
// out_of_range.109
|
||||
try
|
||||
{
|
||||
// try to use an array index that is not a number
|
||||
j.contains("/array/one"_json_pointer);
|
||||
}
|
||||
catch (json::parse_error& e)
|
||||
{
|
||||
std::cout << e.what() << '\n';
|
||||
}
|
||||
}
|
1
doc/examples/contains_json_pointer.link
Normal file
1
doc/examples/contains_json_pointer.link
Normal file
|
@ -0,0 +1 @@
|
|||
<a target="_blank" href="https://wandbox.org/permlink/3TJ79OzHP4vmN1Nb"><b>online</b></a>
|
10
doc/examples/contains_json_pointer.output
Normal file
10
doc/examples/contains_json_pointer.output
Normal file
|
@ -0,0 +1,10 @@
|
|||
true
|
||||
true
|
||||
true
|
||||
true
|
||||
true
|
||||
false
|
||||
false
|
||||
false
|
||||
[json.exception.parse_error.106] parse error: array index '01' must not begin with '0'
|
||||
[json.exception.parse_error.109] parse error: array index 'one' is not a number
|
|
@ -2,7 +2,7 @@
|
|||
"compiler": {
|
||||
"c++": "201103",
|
||||
"family": "clang",
|
||||
"version": "10.0.0 (clang-1000.11.45.5)"
|
||||
"version": "10.0.1 (clang-1001.0.46.4)"
|
||||
},
|
||||
"copyright": "(C) 2013-2017 Niels Lohmann",
|
||||
"name": "JSON for Modern C++",
|
||||
|
@ -10,8 +10,8 @@
|
|||
"url": "https://github.com/nlohmann/json",
|
||||
"version": {
|
||||
"major": 3,
|
||||
"minor": 6,
|
||||
"patch": 1,
|
||||
"string": "3.6.1"
|
||||
"minor": 7,
|
||||
"patch": 0,
|
||||
"string": "3.7.0"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -4,7 +4,9 @@ These pages contain the API documentation of JSON for Modern C++, a C++11 header
|
|||
|
||||
# Contents
|
||||
|
||||
- @link nlohmann::basic_json `basic_json` class @endlink
|
||||
- Classes
|
||||
- @link nlohmann::basic_json `basic_json`@endlink -- class template for JSON values
|
||||
- @link nlohmann::json `json`@endlink -- the default specialization of `basic_json`, defined as `basic_json<>`
|
||||
- [Functions](functions_func.html)
|
||||
- object inspection
|
||||
- @link nlohmann::basic_json::dump dump @endlink -- value serialization
|
||||
|
@ -327,4 +329,4 @@ Note that this table only lists those exceptions thrown due to the type. For ins
|
|||
@author [Niels Lohmann](http://nlohmann.me)
|
||||
@see https://github.com/nlohmann/json to download the source code
|
||||
|
||||
@version 3.6.1
|
||||
@version 3.7.0
|
||||
|
|
BIN
doc/json.gif
BIN
doc/json.gif
Binary file not shown.
Before Width: | Height: | Size: 1.6 MiB After Width: | Height: | Size: 1.6 MiB |
|
@ -26,7 +26,7 @@ namespace detail
|
|||
template<typename BasicJsonType>
|
||||
void from_json(const BasicJsonType& j, typename std::nullptr_t& n)
|
||||
{
|
||||
if (JSON_UNLIKELY(not j.is_null()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not j.is_null()))
|
||||
{
|
||||
JSON_THROW(type_error::create(302, "type must be null, but is " + std::string(j.type_name())));
|
||||
}
|
||||
|
@ -66,7 +66,7 @@ void get_arithmetic_value(const BasicJsonType& j, ArithmeticType& val)
|
|||
template<typename BasicJsonType>
|
||||
void from_json(const BasicJsonType& j, typename BasicJsonType::boolean_t& b)
|
||||
{
|
||||
if (JSON_UNLIKELY(not j.is_boolean()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not j.is_boolean()))
|
||||
{
|
||||
JSON_THROW(type_error::create(302, "type must be boolean, but is " + std::string(j.type_name())));
|
||||
}
|
||||
|
@ -76,7 +76,7 @@ void from_json(const BasicJsonType& j, typename BasicJsonType::boolean_t& b)
|
|||
template<typename BasicJsonType>
|
||||
void from_json(const BasicJsonType& j, typename BasicJsonType::string_t& s)
|
||||
{
|
||||
if (JSON_UNLIKELY(not j.is_string()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not j.is_string()))
|
||||
{
|
||||
JSON_THROW(type_error::create(302, "type must be string, but is " + std::string(j.type_name())));
|
||||
}
|
||||
|
@ -92,7 +92,7 @@ template <
|
|||
int > = 0 >
|
||||
void from_json(const BasicJsonType& j, ConstructibleStringType& s)
|
||||
{
|
||||
if (JSON_UNLIKELY(not j.is_string()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not j.is_string()))
|
||||
{
|
||||
JSON_THROW(type_error::create(302, "type must be string, but is " + std::string(j.type_name())));
|
||||
}
|
||||
|
@ -132,10 +132,11 @@ template<typename BasicJsonType, typename T, typename Allocator,
|
|||
enable_if_t<std::is_convertible<BasicJsonType, T>::value, int> = 0>
|
||||
void from_json(const BasicJsonType& j, std::forward_list<T, Allocator>& l)
|
||||
{
|
||||
if (JSON_UNLIKELY(not j.is_array()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not j.is_array()))
|
||||
{
|
||||
JSON_THROW(type_error::create(302, "type must be array, but is " + std::string(j.type_name())));
|
||||
}
|
||||
l.clear();
|
||||
std::transform(j.rbegin(), j.rend(),
|
||||
std::front_inserter(l), [](const BasicJsonType & i)
|
||||
{
|
||||
|
@ -148,7 +149,7 @@ template<typename BasicJsonType, typename T,
|
|||
enable_if_t<std::is_convertible<BasicJsonType, T>::value, int> = 0>
|
||||
void from_json(const BasicJsonType& j, std::valarray<T>& l)
|
||||
{
|
||||
if (JSON_UNLIKELY(not j.is_array()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not j.is_array()))
|
||||
{
|
||||
JSON_THROW(type_error::create(302, "type must be array, but is " + std::string(j.type_name())));
|
||||
}
|
||||
|
@ -156,6 +157,16 @@ void from_json(const BasicJsonType& j, std::valarray<T>& l)
|
|||
std::copy(j.m_value.array->begin(), j.m_value.array->end(), std::begin(l));
|
||||
}
|
||||
|
||||
template <typename BasicJsonType, typename T, std::size_t N>
|
||||
auto from_json(const BasicJsonType& j, T (&arr)[N])
|
||||
-> decltype(j.template get<T>(), void())
|
||||
{
|
||||
for (std::size_t i = 0; i < N; ++i)
|
||||
{
|
||||
arr[i] = j.at(i).template get<T>();
|
||||
}
|
||||
}
|
||||
|
||||
template<typename BasicJsonType>
|
||||
void from_json_array_impl(const BasicJsonType& j, typename BasicJsonType::array_t& arr, priority_tag<3> /*unused*/)
|
||||
{
|
||||
|
@ -182,14 +193,16 @@ auto from_json_array_impl(const BasicJsonType& j, ConstructibleArrayType& arr, p
|
|||
{
|
||||
using std::end;
|
||||
|
||||
arr.reserve(j.size());
|
||||
ConstructibleArrayType ret;
|
||||
ret.reserve(j.size());
|
||||
std::transform(j.begin(), j.end(),
|
||||
std::inserter(arr, end(arr)), [](const BasicJsonType & i)
|
||||
std::inserter(ret, end(ret)), [](const BasicJsonType & i)
|
||||
{
|
||||
// get<BasicJsonType>() returns *this, this won't call a from_json
|
||||
// method when value_type is BasicJsonType
|
||||
return i.template get<typename ConstructibleArrayType::value_type>();
|
||||
});
|
||||
arr = std::move(ret);
|
||||
}
|
||||
|
||||
template <typename BasicJsonType, typename ConstructibleArrayType>
|
||||
|
@ -198,14 +211,16 @@ void from_json_array_impl(const BasicJsonType& j, ConstructibleArrayType& arr,
|
|||
{
|
||||
using std::end;
|
||||
|
||||
ConstructibleArrayType ret;
|
||||
std::transform(
|
||||
j.begin(), j.end(), std::inserter(arr, end(arr)),
|
||||
j.begin(), j.end(), std::inserter(ret, end(ret)),
|
||||
[](const BasicJsonType & i)
|
||||
{
|
||||
// get<BasicJsonType>() returns *this, this won't call a from_json
|
||||
// method when value_type is BasicJsonType
|
||||
return i.template get<typename ConstructibleArrayType::value_type>();
|
||||
});
|
||||
arr = std::move(ret);
|
||||
}
|
||||
|
||||
template <typename BasicJsonType, typename ConstructibleArrayType,
|
||||
|
@ -221,7 +236,7 @@ auto from_json(const BasicJsonType& j, ConstructibleArrayType& arr)
|
|||
j.template get<typename ConstructibleArrayType::value_type>(),
|
||||
void())
|
||||
{
|
||||
if (JSON_UNLIKELY(not j.is_array()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not j.is_array()))
|
||||
{
|
||||
JSON_THROW(type_error::create(302, "type must be array, but is " +
|
||||
std::string(j.type_name())));
|
||||
|
@ -234,20 +249,22 @@ template<typename BasicJsonType, typename ConstructibleObjectType,
|
|||
enable_if_t<is_constructible_object_type<BasicJsonType, ConstructibleObjectType>::value, int> = 0>
|
||||
void from_json(const BasicJsonType& j, ConstructibleObjectType& obj)
|
||||
{
|
||||
if (JSON_UNLIKELY(not j.is_object()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not j.is_object()))
|
||||
{
|
||||
JSON_THROW(type_error::create(302, "type must be object, but is " + std::string(j.type_name())));
|
||||
}
|
||||
|
||||
ConstructibleObjectType ret;
|
||||
auto inner_object = j.template get_ptr<const typename BasicJsonType::object_t*>();
|
||||
using value_type = typename ConstructibleObjectType::value_type;
|
||||
std::transform(
|
||||
inner_object->begin(), inner_object->end(),
|
||||
std::inserter(obj, obj.begin()),
|
||||
std::inserter(ret, ret.begin()),
|
||||
[](typename BasicJsonType::object_t::value_type const & p)
|
||||
{
|
||||
return value_type(p.first, p.second.template get<typename ConstructibleObjectType::mapped_type>());
|
||||
});
|
||||
obj = std::move(ret);
|
||||
}
|
||||
|
||||
// overload for arithmetic types, not chosen for basic_json template arguments
|
||||
|
@ -315,13 +332,14 @@ template <typename BasicJsonType, typename Key, typename Value, typename Compare
|
|||
typename BasicJsonType::string_t, Key>::value>>
|
||||
void from_json(const BasicJsonType& j, std::map<Key, Value, Compare, Allocator>& m)
|
||||
{
|
||||
if (JSON_UNLIKELY(not j.is_array()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not j.is_array()))
|
||||
{
|
||||
JSON_THROW(type_error::create(302, "type must be array, but is " + std::string(j.type_name())));
|
||||
}
|
||||
m.clear();
|
||||
for (const auto& p : j)
|
||||
{
|
||||
if (JSON_UNLIKELY(not p.is_array()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not p.is_array()))
|
||||
{
|
||||
JSON_THROW(type_error::create(302, "type must be array, but is " + std::string(p.type_name())));
|
||||
}
|
||||
|
@ -334,13 +352,14 @@ template <typename BasicJsonType, typename Key, typename Value, typename Hash, t
|
|||
typename BasicJsonType::string_t, Key>::value>>
|
||||
void from_json(const BasicJsonType& j, std::unordered_map<Key, Value, Hash, KeyEqual, Allocator>& m)
|
||||
{
|
||||
if (JSON_UNLIKELY(not j.is_array()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not j.is_array()))
|
||||
{
|
||||
JSON_THROW(type_error::create(302, "type must be array, but is " + std::string(j.type_name())));
|
||||
}
|
||||
m.clear();
|
||||
for (const auto& p : j)
|
||||
{
|
||||
if (JSON_UNLIKELY(not p.is_array()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not p.is_array()))
|
||||
{
|
||||
JSON_THROW(type_error::create(302, "type must be array, but is " + std::string(p.type_name())));
|
||||
}
|
||||
|
@ -367,4 +386,4 @@ namespace
|
|||
{
|
||||
constexpr const auto& from_json = detail::static_const<detail::from_json_fn>::value;
|
||||
} // namespace
|
||||
} // namespace nlohmann
|
||||
} // namespace nlohmann
|
||||
|
|
|
@ -8,6 +8,7 @@
|
|||
#include <cstring> // memcpy, memmove
|
||||
#include <limits> // numeric_limits
|
||||
#include <type_traits> // conditional
|
||||
#include <nlohmann/detail/macro_scope.hpp>
|
||||
|
||||
namespace nlohmann
|
||||
{
|
||||
|
@ -818,6 +819,7 @@ v = buf * 10^decimal_exponent
|
|||
len is the length of the buffer (number of decimal digits)
|
||||
The buffer must be large enough, i.e. >= max_digits10.
|
||||
*/
|
||||
JSON_HEDLEY_NON_NULL(1)
|
||||
inline void grisu2(char* buf, int& len, int& decimal_exponent,
|
||||
diyfp m_minus, diyfp v, diyfp m_plus)
|
||||
{
|
||||
|
@ -877,6 +879,7 @@ len is the length of the buffer (number of decimal digits)
|
|||
The buffer must be large enough, i.e. >= max_digits10.
|
||||
*/
|
||||
template <typename FloatType>
|
||||
JSON_HEDLEY_NON_NULL(1)
|
||||
void grisu2(char* buf, int& len, int& decimal_exponent, FloatType value)
|
||||
{
|
||||
static_assert(diyfp::kPrecision >= std::numeric_limits<FloatType>::digits + 3,
|
||||
|
@ -915,6 +918,8 @@ void grisu2(char* buf, int& len, int& decimal_exponent, FloatType value)
|
|||
@return a pointer to the element following the exponent.
|
||||
@pre -1000 < e < 1000
|
||||
*/
|
||||
JSON_HEDLEY_NON_NULL(1)
|
||||
JSON_HEDLEY_RETURNS_NON_NULL
|
||||
inline char* append_exponent(char* buf, int e)
|
||||
{
|
||||
assert(e > -1000);
|
||||
|
@ -965,6 +970,8 @@ notation. Otherwise it will be printed in exponential notation.
|
|||
@pre min_exp < 0
|
||||
@pre max_exp > 0
|
||||
*/
|
||||
JSON_HEDLEY_NON_NULL(1)
|
||||
JSON_HEDLEY_RETURNS_NON_NULL
|
||||
inline char* format_buffer(char* buf, int len, int decimal_exponent,
|
||||
int min_exp, int max_exp)
|
||||
{
|
||||
|
@ -1048,6 +1055,8 @@ format. Returns an iterator pointing past-the-end of the decimal representation.
|
|||
@note The result is NOT null-terminated.
|
||||
*/
|
||||
template <typename FloatType>
|
||||
JSON_HEDLEY_NON_NULL(1, 2)
|
||||
JSON_HEDLEY_RETURNS_NON_NULL
|
||||
char* to_chars(char* first, const char* last, FloatType value)
|
||||
{
|
||||
static_cast<void>(last); // maybe unused - fix warning
|
||||
|
|
|
@ -341,4 +341,4 @@ namespace
|
|||
{
|
||||
constexpr const auto& to_json = detail::static_const<detail::to_json_fn>::value;
|
||||
} // namespace
|
||||
} // namespace nlohmann
|
||||
} // namespace nlohmann
|
||||
|
|
|
@ -5,6 +5,7 @@
|
|||
#include <string> // to_string
|
||||
|
||||
#include <nlohmann/detail/input/position_t.hpp>
|
||||
#include <nlohmann/detail/macro_scope.hpp>
|
||||
|
||||
namespace nlohmann
|
||||
{
|
||||
|
@ -46,6 +47,7 @@ class exception : public std::exception
|
|||
{
|
||||
public:
|
||||
/// returns the explanatory string
|
||||
JSON_HEDLEY_RETURNS_NON_NULL
|
||||
const char* what() const noexcept override
|
||||
{
|
||||
return m.what();
|
||||
|
@ -55,6 +57,7 @@ class exception : public std::exception
|
|||
const int id;
|
||||
|
||||
protected:
|
||||
JSON_HEDLEY_NON_NULL(3)
|
||||
exception(int id_, const char* what_arg) : id(id_), m(what_arg) {}
|
||||
|
||||
static std::string name(const std::string& ename, int id_)
|
||||
|
@ -207,6 +210,7 @@ class invalid_iterator : public exception
|
|||
}
|
||||
|
||||
private:
|
||||
JSON_HEDLEY_NON_NULL(3)
|
||||
invalid_iterator(int id_, const char* what_arg)
|
||||
: exception(id_, what_arg) {}
|
||||
};
|
||||
|
@ -260,6 +264,7 @@ class type_error : public exception
|
|||
}
|
||||
|
||||
private:
|
||||
JSON_HEDLEY_NON_NULL(3)
|
||||
type_error(int id_, const char* what_arg) : exception(id_, what_arg) {}
|
||||
};
|
||||
|
||||
|
@ -306,6 +311,7 @@ class out_of_range : public exception
|
|||
}
|
||||
|
||||
private:
|
||||
JSON_HEDLEY_NON_NULL(3)
|
||||
out_of_range(int id_, const char* what_arg) : exception(id_, what_arg) {}
|
||||
};
|
||||
|
||||
|
@ -343,6 +349,7 @@ class other_error : public exception
|
|||
}
|
||||
|
||||
private:
|
||||
JSON_HEDLEY_NON_NULL(3)
|
||||
other_error(int id_, const char* what_arg) : exception(id_, what_arg) {}
|
||||
};
|
||||
} // namespace detail
|
||||
|
|
|
@ -66,6 +66,7 @@ class binary_reader
|
|||
|
||||
@return
|
||||
*/
|
||||
JSON_HEDLEY_NON_NULL(3)
|
||||
bool sax_parse(const input_format_t format,
|
||||
json_sax_t* sax_,
|
||||
const bool strict = true)
|
||||
|
@ -107,7 +108,7 @@ class binary_reader
|
|||
get();
|
||||
}
|
||||
|
||||
if (JSON_UNLIKELY(current != std::char_traits<char>::eof()))
|
||||
if (JSON_HEDLEY_UNLIKELY(current != std::char_traits<char>::eof()))
|
||||
{
|
||||
return sax->parse_error(chars_read, get_token_string(),
|
||||
parse_error::create(110, chars_read, exception_message(format, "expected end of input; last byte: 0x" + get_token_string(), "value")));
|
||||
|
@ -143,12 +144,12 @@ class binary_reader
|
|||
std::int32_t document_size;
|
||||
get_number<std::int32_t, true>(input_format_t::bson, document_size);
|
||||
|
||||
if (JSON_UNLIKELY(not sax->start_object(std::size_t(-1))))
|
||||
if (JSON_HEDLEY_UNLIKELY(not sax->start_object(std::size_t(-1))))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if (JSON_UNLIKELY(not parse_bson_element_list(/*is_array*/false)))
|
||||
if (JSON_HEDLEY_UNLIKELY(not parse_bson_element_list(/*is_array*/false)))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -169,7 +170,7 @@ class binary_reader
|
|||
while (true)
|
||||
{
|
||||
get();
|
||||
if (JSON_UNLIKELY(not unexpect_eof(input_format_t::bson, "cstring")))
|
||||
if (JSON_HEDLEY_UNLIKELY(not unexpect_eof(input_format_t::bson, "cstring")))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -197,7 +198,7 @@ class binary_reader
|
|||
template<typename NumberType>
|
||||
bool get_bson_string(const NumberType len, string_t& result)
|
||||
{
|
||||
if (JSON_UNLIKELY(len < 1))
|
||||
if (JSON_HEDLEY_UNLIKELY(len < 1))
|
||||
{
|
||||
auto last_token = get_token_string();
|
||||
return sax->parse_error(chars_read, last_token, parse_error::create(112, chars_read, exception_message(input_format_t::bson, "string length must be at least 1, is " + std::to_string(len), "string")));
|
||||
|
@ -292,13 +293,13 @@ class binary_reader
|
|||
string_t key;
|
||||
while (int element_type = get())
|
||||
{
|
||||
if (JSON_UNLIKELY(not unexpect_eof(input_format_t::bson, "element list")))
|
||||
if (JSON_HEDLEY_UNLIKELY(not unexpect_eof(input_format_t::bson, "element list")))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
const std::size_t element_type_parse_position = chars_read;
|
||||
if (JSON_UNLIKELY(not get_bson_cstr(key)))
|
||||
if (JSON_HEDLEY_UNLIKELY(not get_bson_cstr(key)))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -308,7 +309,7 @@ class binary_reader
|
|||
return false;
|
||||
}
|
||||
|
||||
if (JSON_UNLIKELY(not parse_bson_element_internal(element_type, element_type_parse_position)))
|
||||
if (JSON_HEDLEY_UNLIKELY(not parse_bson_element_internal(element_type, element_type_parse_position)))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -329,12 +330,12 @@ class binary_reader
|
|||
std::int32_t document_size;
|
||||
get_number<std::int32_t, true>(input_format_t::bson, document_size);
|
||||
|
||||
if (JSON_UNLIKELY(not sax->start_array(std::size_t(-1))))
|
||||
if (JSON_HEDLEY_UNLIKELY(not sax->start_array(std::size_t(-1))))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if (JSON_UNLIKELY(not parse_bson_element_list(/*is_array*/true)))
|
||||
if (JSON_HEDLEY_UNLIKELY(not parse_bson_element_list(/*is_array*/true)))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -619,12 +620,12 @@ class binary_reader
|
|||
case 0xF9: // Half-Precision Float (two-byte IEEE 754)
|
||||
{
|
||||
const int byte1_raw = get();
|
||||
if (JSON_UNLIKELY(not unexpect_eof(input_format_t::cbor, "number")))
|
||||
if (JSON_HEDLEY_UNLIKELY(not unexpect_eof(input_format_t::cbor, "number")))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
const int byte2_raw = get();
|
||||
if (JSON_UNLIKELY(not unexpect_eof(input_format_t::cbor, "number")))
|
||||
if (JSON_HEDLEY_UNLIKELY(not unexpect_eof(input_format_t::cbor, "number")))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -697,7 +698,7 @@ class binary_reader
|
|||
*/
|
||||
bool get_cbor_string(string_t& result)
|
||||
{
|
||||
if (JSON_UNLIKELY(not unexpect_eof(input_format_t::cbor, "string")))
|
||||
if (JSON_HEDLEY_UNLIKELY(not unexpect_eof(input_format_t::cbor, "string")))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -786,7 +787,7 @@ class binary_reader
|
|||
*/
|
||||
bool get_cbor_array(const std::size_t len)
|
||||
{
|
||||
if (JSON_UNLIKELY(not sax->start_array(len)))
|
||||
if (JSON_HEDLEY_UNLIKELY(not sax->start_array(len)))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -795,7 +796,7 @@ class binary_reader
|
|||
{
|
||||
for (std::size_t i = 0; i < len; ++i)
|
||||
{
|
||||
if (JSON_UNLIKELY(not parse_cbor_internal()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not parse_cbor_internal()))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -805,7 +806,7 @@ class binary_reader
|
|||
{
|
||||
while (get() != 0xFF)
|
||||
{
|
||||
if (JSON_UNLIKELY(not parse_cbor_internal(false)))
|
||||
if (JSON_HEDLEY_UNLIKELY(not parse_cbor_internal(false)))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -822,7 +823,7 @@ class binary_reader
|
|||
*/
|
||||
bool get_cbor_object(const std::size_t len)
|
||||
{
|
||||
if (JSON_UNLIKELY(not sax->start_object(len)))
|
||||
if (JSON_HEDLEY_UNLIKELY(not sax->start_object(len)))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -833,12 +834,12 @@ class binary_reader
|
|||
for (std::size_t i = 0; i < len; ++i)
|
||||
{
|
||||
get();
|
||||
if (JSON_UNLIKELY(not get_cbor_string(key) or not sax->key(key)))
|
||||
if (JSON_HEDLEY_UNLIKELY(not get_cbor_string(key) or not sax->key(key)))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if (JSON_UNLIKELY(not parse_cbor_internal()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not parse_cbor_internal()))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -849,12 +850,12 @@ class binary_reader
|
|||
{
|
||||
while (get() != 0xFF)
|
||||
{
|
||||
if (JSON_UNLIKELY(not get_cbor_string(key) or not sax->key(key)))
|
||||
if (JSON_HEDLEY_UNLIKELY(not get_cbor_string(key) or not sax->key(key)))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if (JSON_UNLIKELY(not parse_cbor_internal()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not parse_cbor_internal()))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -1238,7 +1239,7 @@ class binary_reader
|
|||
*/
|
||||
bool get_msgpack_string(string_t& result)
|
||||
{
|
||||
if (JSON_UNLIKELY(not unexpect_eof(input_format_t::msgpack, "string")))
|
||||
if (JSON_HEDLEY_UNLIKELY(not unexpect_eof(input_format_t::msgpack, "string")))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -1314,14 +1315,14 @@ class binary_reader
|
|||
*/
|
||||
bool get_msgpack_array(const std::size_t len)
|
||||
{
|
||||
if (JSON_UNLIKELY(not sax->start_array(len)))
|
||||
if (JSON_HEDLEY_UNLIKELY(not sax->start_array(len)))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
for (std::size_t i = 0; i < len; ++i)
|
||||
{
|
||||
if (JSON_UNLIKELY(not parse_msgpack_internal()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not parse_msgpack_internal()))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -1336,7 +1337,7 @@ class binary_reader
|
|||
*/
|
||||
bool get_msgpack_object(const std::size_t len)
|
||||
{
|
||||
if (JSON_UNLIKELY(not sax->start_object(len)))
|
||||
if (JSON_HEDLEY_UNLIKELY(not sax->start_object(len)))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -1345,12 +1346,12 @@ class binary_reader
|
|||
for (std::size_t i = 0; i < len; ++i)
|
||||
{
|
||||
get();
|
||||
if (JSON_UNLIKELY(not get_msgpack_string(key) or not sax->key(key)))
|
||||
if (JSON_HEDLEY_UNLIKELY(not get_msgpack_string(key) or not sax->key(key)))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if (JSON_UNLIKELY(not parse_msgpack_internal()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not parse_msgpack_internal()))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -1397,7 +1398,7 @@ class binary_reader
|
|||
get(); // TODO(niels): may we ignore N here?
|
||||
}
|
||||
|
||||
if (JSON_UNLIKELY(not unexpect_eof(input_format_t::ubjson, "value")))
|
||||
if (JSON_HEDLEY_UNLIKELY(not unexpect_eof(input_format_t::ubjson, "value")))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -1451,7 +1452,7 @@ class binary_reader
|
|||
case 'U':
|
||||
{
|
||||
std::uint8_t number;
|
||||
if (JSON_UNLIKELY(not get_number(input_format_t::ubjson, number)))
|
||||
if (JSON_HEDLEY_UNLIKELY(not get_number(input_format_t::ubjson, number)))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -1462,7 +1463,7 @@ class binary_reader
|
|||
case 'i':
|
||||
{
|
||||
std::int8_t number;
|
||||
if (JSON_UNLIKELY(not get_number(input_format_t::ubjson, number)))
|
||||
if (JSON_HEDLEY_UNLIKELY(not get_number(input_format_t::ubjson, number)))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -1473,7 +1474,7 @@ class binary_reader
|
|||
case 'I':
|
||||
{
|
||||
std::int16_t number;
|
||||
if (JSON_UNLIKELY(not get_number(input_format_t::ubjson, number)))
|
||||
if (JSON_HEDLEY_UNLIKELY(not get_number(input_format_t::ubjson, number)))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -1484,7 +1485,7 @@ class binary_reader
|
|||
case 'l':
|
||||
{
|
||||
std::int32_t number;
|
||||
if (JSON_UNLIKELY(not get_number(input_format_t::ubjson, number)))
|
||||
if (JSON_HEDLEY_UNLIKELY(not get_number(input_format_t::ubjson, number)))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -1495,7 +1496,7 @@ class binary_reader
|
|||
case 'L':
|
||||
{
|
||||
std::int64_t number;
|
||||
if (JSON_UNLIKELY(not get_number(input_format_t::ubjson, number)))
|
||||
if (JSON_HEDLEY_UNLIKELY(not get_number(input_format_t::ubjson, number)))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -1531,15 +1532,15 @@ class binary_reader
|
|||
if (current == '$')
|
||||
{
|
||||
result.second = get(); // must not ignore 'N', because 'N' maybe the type
|
||||
if (JSON_UNLIKELY(not unexpect_eof(input_format_t::ubjson, "type")))
|
||||
if (JSON_HEDLEY_UNLIKELY(not unexpect_eof(input_format_t::ubjson, "type")))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
get_ignore_noop();
|
||||
if (JSON_UNLIKELY(current != '#'))
|
||||
if (JSON_HEDLEY_UNLIKELY(current != '#'))
|
||||
{
|
||||
if (JSON_UNLIKELY(not unexpect_eof(input_format_t::ubjson, "value")))
|
||||
if (JSON_HEDLEY_UNLIKELY(not unexpect_eof(input_format_t::ubjson, "value")))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -1622,11 +1623,11 @@ class binary_reader
|
|||
case 'C': // char
|
||||
{
|
||||
get();
|
||||
if (JSON_UNLIKELY(not unexpect_eof(input_format_t::ubjson, "char")))
|
||||
if (JSON_HEDLEY_UNLIKELY(not unexpect_eof(input_format_t::ubjson, "char")))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
if (JSON_UNLIKELY(current > 127))
|
||||
if (JSON_HEDLEY_UNLIKELY(current > 127))
|
||||
{
|
||||
auto last_token = get_token_string();
|
||||
return sax->parse_error(chars_read, last_token, parse_error::create(113, chars_read, exception_message(input_format_t::ubjson, "byte after 'C' must be in range 0x00..0x7F; last byte: 0x" + last_token, "char")));
|
||||
|
@ -1661,14 +1662,14 @@ class binary_reader
|
|||
bool get_ubjson_array()
|
||||
{
|
||||
std::pair<std::size_t, int> size_and_type;
|
||||
if (JSON_UNLIKELY(not get_ubjson_size_type(size_and_type)))
|
||||
if (JSON_HEDLEY_UNLIKELY(not get_ubjson_size_type(size_and_type)))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if (size_and_type.first != string_t::npos)
|
||||
{
|
||||
if (JSON_UNLIKELY(not sax->start_array(size_and_type.first)))
|
||||
if (JSON_HEDLEY_UNLIKELY(not sax->start_array(size_and_type.first)))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -1679,7 +1680,7 @@ class binary_reader
|
|||
{
|
||||
for (std::size_t i = 0; i < size_and_type.first; ++i)
|
||||
{
|
||||
if (JSON_UNLIKELY(not get_ubjson_value(size_and_type.second)))
|
||||
if (JSON_HEDLEY_UNLIKELY(not get_ubjson_value(size_and_type.second)))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -1690,7 +1691,7 @@ class binary_reader
|
|||
{
|
||||
for (std::size_t i = 0; i < size_and_type.first; ++i)
|
||||
{
|
||||
if (JSON_UNLIKELY(not parse_ubjson_internal()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not parse_ubjson_internal()))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -1699,14 +1700,14 @@ class binary_reader
|
|||
}
|
||||
else
|
||||
{
|
||||
if (JSON_UNLIKELY(not sax->start_array(std::size_t(-1))))
|
||||
if (JSON_HEDLEY_UNLIKELY(not sax->start_array(std::size_t(-1))))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
while (current != ']')
|
||||
{
|
||||
if (JSON_UNLIKELY(not parse_ubjson_internal(false)))
|
||||
if (JSON_HEDLEY_UNLIKELY(not parse_ubjson_internal(false)))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -1723,7 +1724,7 @@ class binary_reader
|
|||
bool get_ubjson_object()
|
||||
{
|
||||
std::pair<std::size_t, int> size_and_type;
|
||||
if (JSON_UNLIKELY(not get_ubjson_size_type(size_and_type)))
|
||||
if (JSON_HEDLEY_UNLIKELY(not get_ubjson_size_type(size_and_type)))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -1731,7 +1732,7 @@ class binary_reader
|
|||
string_t key;
|
||||
if (size_and_type.first != string_t::npos)
|
||||
{
|
||||
if (JSON_UNLIKELY(not sax->start_object(size_and_type.first)))
|
||||
if (JSON_HEDLEY_UNLIKELY(not sax->start_object(size_and_type.first)))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -1740,11 +1741,11 @@ class binary_reader
|
|||
{
|
||||
for (std::size_t i = 0; i < size_and_type.first; ++i)
|
||||
{
|
||||
if (JSON_UNLIKELY(not get_ubjson_string(key) or not sax->key(key)))
|
||||
if (JSON_HEDLEY_UNLIKELY(not get_ubjson_string(key) or not sax->key(key)))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
if (JSON_UNLIKELY(not get_ubjson_value(size_and_type.second)))
|
||||
if (JSON_HEDLEY_UNLIKELY(not get_ubjson_value(size_and_type.second)))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -1755,11 +1756,11 @@ class binary_reader
|
|||
{
|
||||
for (std::size_t i = 0; i < size_and_type.first; ++i)
|
||||
{
|
||||
if (JSON_UNLIKELY(not get_ubjson_string(key) or not sax->key(key)))
|
||||
if (JSON_HEDLEY_UNLIKELY(not get_ubjson_string(key) or not sax->key(key)))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
if (JSON_UNLIKELY(not parse_ubjson_internal()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not parse_ubjson_internal()))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -1769,18 +1770,18 @@ class binary_reader
|
|||
}
|
||||
else
|
||||
{
|
||||
if (JSON_UNLIKELY(not sax->start_object(std::size_t(-1))))
|
||||
if (JSON_HEDLEY_UNLIKELY(not sax->start_object(std::size_t(-1))))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
while (current != '}')
|
||||
{
|
||||
if (JSON_UNLIKELY(not get_ubjson_string(key, false) or not sax->key(key)))
|
||||
if (JSON_HEDLEY_UNLIKELY(not get_ubjson_string(key, false) or not sax->key(key)))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
if (JSON_UNLIKELY(not parse_ubjson_internal()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not parse_ubjson_internal()))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -1846,7 +1847,7 @@ class binary_reader
|
|||
for (std::size_t i = 0; i < sizeof(NumberType); ++i)
|
||||
{
|
||||
get();
|
||||
if (JSON_UNLIKELY(not unexpect_eof(format, "number")))
|
||||
if (JSON_HEDLEY_UNLIKELY(not unexpect_eof(format, "number")))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -1890,7 +1891,7 @@ class binary_reader
|
|||
std::generate_n(std::back_inserter(result), len, [this, &success, &format]()
|
||||
{
|
||||
get();
|
||||
if (JSON_UNLIKELY(not unexpect_eof(format, "string")))
|
||||
if (JSON_HEDLEY_UNLIKELY(not unexpect_eof(format, "string")))
|
||||
{
|
||||
success = false;
|
||||
}
|
||||
|
@@ -1904,9 +1905,10 @@ class binary_reader
    @param[in] context further context information (for diagnostics)
    @return whether the last read character is not EOF
    */
    JSON_HEDLEY_NON_NULL(3)
    bool unexpect_eof(const input_format_t format, const char* context) const
    {
        if (JSON_UNLIKELY(current == std::char_traits<char>::eof()))
        if (JSON_HEDLEY_UNLIKELY(current == std::char_traits<char>::eof()))
        {
            return sax->parse_error(chars_read, "<end of file>",
                                    parse_error::create(110, chars_read, exception_message(format, "unexpected end of input", context)));
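The unexpect_eof helper above raises error id 110 when a binary input ends before the announced data is complete. A minimal sketch of how that surfaces to a caller, assuming the single-header nlohmann/json.hpp is available; the truncated CBOR bytes and the printed text are illustrative only:

#include <cstdint>
#include <iostream>
#include <vector>
#include <nlohmann/json.hpp>

using json = nlohmann::json;

int main()
{
    // 0xA1 announces a CBOR map with one key/value pair, but the input stops
    // in the middle of the 4-character key announced by 0x64.
    const std::vector<std::uint8_t> truncated = {0xA1, 0x64, 'k', 'e'};

    try
    {
        const json j = json::from_cbor(truncated);
    }
    catch (const json::parse_error& e)
    {
        // per the hunk above, premature end of input is reported as id 110
        std::cout << "parse_error " << e.id << ": " << e.what() << '\n';
    }
    return 0;
}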
|
|
@@ -55,6 +55,7 @@ Input adapter for stdio file access. This adapter read only 1 byte and do not us
class file_input_adapter : public input_adapter_protocol
{
  public:
    JSON_HEDLEY_NON_NULL(2)
    explicit file_input_adapter(std::FILE* f) noexcept
        : m_file(f)
    {}

@@ -130,6 +131,7 @@ class input_stream_adapter
class input_buffer_adapter : public input_adapter_protocol
{
  public:
    JSON_HEDLEY_NON_NULL(2)
    input_buffer_adapter(const char* b, const std::size_t l) noexcept
        : cursor(b), limit(b + l)
    {}

@@ -143,7 +145,7 @@ class input_buffer_adapter

    std::char_traits<char>::int_type get_character() noexcept override
    {
        if (JSON_LIKELY(cursor < limit))
        if (JSON_HEDLEY_LIKELY(cursor < limit))
        {
            return std::char_traits<char>::to_int_type(*(cursor++));
        }

@@ -333,6 +335,7 @@ class input_adapter
{
  public:
    // native support
    JSON_HEDLEY_NON_NULL(2)
    input_adapter(std::FILE* file)
        : ia(std::make_shared<file_input_adapter>(file)) {}
    /// input adapter for input stream
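The new file_input_adapter and the FILE* constructor of input_adapter shown above let the parser read directly from a C stdio stream. A small usage sketch; the file name is hypothetical and error handling is kept minimal:

#include <cstdio>
#include <nlohmann/json.hpp>

using json = nlohmann::json;

int main()
{
    // hypothetical input file; parse() picks it up through the new adapter
    std::FILE* f = std::fopen("config.json", "r");
    if (f == nullptr)
    {
        return 1;
    }

    const json j = json::parse(f);   // reads byte by byte via file_input_adapter
    std::fclose(f);

    return j.is_object() ? 0 : 1;
}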
|
@ -401,7 +404,7 @@ class input_adapter
|
|||
"each element in the iterator range must have the size of 1 byte");
|
||||
|
||||
const auto len = static_cast<size_t>(std::distance(first, last));
|
||||
if (JSON_LIKELY(len > 0))
|
||||
if (JSON_HEDLEY_LIKELY(len > 0))
|
||||
{
|
||||
// there is at least one element: use the address of first
|
||||
ia = std::make_shared<input_buffer_adapter>(reinterpret_cast<const char*>(&(*first)), len);
|
||||
|
|
|
@ -206,7 +206,7 @@ class json_sax_dom_parser
|
|||
{
|
||||
ref_stack.push_back(handle_value(BasicJsonType::value_t::object));
|
||||
|
||||
if (JSON_UNLIKELY(len != std::size_t(-1) and len > ref_stack.back()->max_size()))
|
||||
if (JSON_HEDLEY_UNLIKELY(len != std::size_t(-1) and len > ref_stack.back()->max_size()))
|
||||
{
|
||||
JSON_THROW(out_of_range::create(408,
|
||||
"excessive object size: " + std::to_string(len)));
|
||||
|
@ -232,7 +232,7 @@ class json_sax_dom_parser
|
|||
{
|
||||
ref_stack.push_back(handle_value(BasicJsonType::value_t::array));
|
||||
|
||||
if (JSON_UNLIKELY(len != std::size_t(-1) and len > ref_stack.back()->max_size()))
|
||||
if (JSON_HEDLEY_UNLIKELY(len != std::size_t(-1) and len > ref_stack.back()->max_size()))
|
||||
{
|
||||
JSON_THROW(out_of_range::create(408,
|
||||
"excessive array size: " + std::to_string(len)));
|
||||
|
@ -288,6 +288,7 @@ class json_sax_dom_parser
|
|||
object to which we can add elements
|
||||
*/
|
||||
template<typename Value>
|
||||
JSON_HEDLEY_RETURNS_NON_NULL
|
||||
BasicJsonType* handle_value(Value&& v)
|
||||
{
|
||||
if (ref_stack.empty())
|
||||
|
@ -394,7 +395,7 @@ class json_sax_dom_callback_parser
|
|||
ref_stack.push_back(val.second);
|
||||
|
||||
// check object limit
|
||||
if (ref_stack.back() and JSON_UNLIKELY(len != std::size_t(-1) and len > ref_stack.back()->max_size()))
|
||||
if (ref_stack.back() and JSON_HEDLEY_UNLIKELY(len != std::size_t(-1) and len > ref_stack.back()->max_size()))
|
||||
{
|
||||
JSON_THROW(out_of_range::create(408, "excessive object size: " + std::to_string(len)));
|
||||
}
|
||||
|
@ -457,7 +458,7 @@ class json_sax_dom_callback_parser
|
|||
ref_stack.push_back(val.second);
|
||||
|
||||
// check array limit
|
||||
if (ref_stack.back() and JSON_UNLIKELY(len != std::size_t(-1) and len > ref_stack.back()->max_size()))
|
||||
if (ref_stack.back() and JSON_HEDLEY_UNLIKELY(len != std::size_t(-1) and len > ref_stack.back()->max_size()))
|
||||
{
|
||||
JSON_THROW(out_of_range::create(408, "excessive array size: " + std::to_string(len)));
|
||||
}
|
||||
|
|
|
@ -59,6 +59,8 @@ class lexer
|
|||
};
|
||||
|
||||
/// return name of values of type token_type (only used for errors)
|
||||
JSON_HEDLEY_RETURNS_NON_NULL
|
||||
JSON_HEDLEY_CONST
|
||||
static const char* token_type_name(const token_type t) noexcept
|
||||
{
|
||||
switch (t)
|
||||
|
@ -118,6 +120,7 @@ class lexer
|
|||
/////////////////////
|
||||
|
||||
/// return the locale-dependent decimal point
|
||||
JSON_HEDLEY_PURE
|
||||
static char get_decimal_point() noexcept
|
||||
{
|
||||
const auto loc = localeconv();
|
||||
|
@ -200,7 +203,7 @@ class lexer
|
|||
for (auto range = ranges.begin(); range != ranges.end(); ++range)
|
||||
{
|
||||
get();
|
||||
if (JSON_LIKELY(*range <= current and current <= *(++range)))
|
||||
if (JSON_HEDLEY_LIKELY(*range <= current and current <= *(++range)))
|
||||
{
|
||||
add(current);
|
||||
}
|
||||
|
@ -299,7 +302,7 @@ class lexer
|
|||
const int codepoint1 = get_codepoint();
|
||||
int codepoint = codepoint1; // start with codepoint1
|
||||
|
||||
if (JSON_UNLIKELY(codepoint1 == -1))
|
||||
if (JSON_HEDLEY_UNLIKELY(codepoint1 == -1))
|
||||
{
|
||||
error_message = "invalid string: '\\u' must be followed by 4 hex digits";
|
||||
return token_type::parse_error;
|
||||
|
@ -309,18 +312,18 @@ class lexer
|
|||
if (0xD800 <= codepoint1 and codepoint1 <= 0xDBFF)
|
||||
{
|
||||
// expect next \uxxxx entry
|
||||
if (JSON_LIKELY(get() == '\\' and get() == 'u'))
|
||||
if (JSON_HEDLEY_LIKELY(get() == '\\' and get() == 'u'))
|
||||
{
|
||||
const int codepoint2 = get_codepoint();
|
||||
|
||||
if (JSON_UNLIKELY(codepoint2 == -1))
|
||||
if (JSON_HEDLEY_UNLIKELY(codepoint2 == -1))
|
||||
{
|
||||
error_message = "invalid string: '\\u' must be followed by 4 hex digits";
|
||||
return token_type::parse_error;
|
||||
}
|
||||
|
||||
// check if codepoint2 is a low surrogate
|
||||
if (JSON_LIKELY(0xDC00 <= codepoint2 and codepoint2 <= 0xDFFF))
|
||||
if (JSON_HEDLEY_LIKELY(0xDC00 <= codepoint2 and codepoint2 <= 0xDFFF))
|
||||
{
|
||||
// overwrite codepoint
|
||||
codepoint = static_cast<int>(
|
||||
|
@ -347,7 +350,7 @@ class lexer
|
|||
}
|
||||
else
|
||||
{
|
||||
if (JSON_UNLIKELY(0xDC00 <= codepoint1 and codepoint1 <= 0xDFFF))
|
||||
if (JSON_HEDLEY_UNLIKELY(0xDC00 <= codepoint1 and codepoint1 <= 0xDFFF))
|
||||
{
|
||||
error_message = "invalid string: surrogate U+DC00..U+DFFF must follow U+D800..U+DBFF";
|
||||
return token_type::parse_error;
|
||||
|
@ -722,7 +725,7 @@ class lexer
|
|||
case 0xDE:
|
||||
case 0xDF:
|
||||
{
|
||||
if (JSON_UNLIKELY(not next_byte_in_range({0x80, 0xBF})))
|
||||
if (JSON_HEDLEY_UNLIKELY(not next_byte_in_range({0x80, 0xBF})))
|
||||
{
|
||||
return token_type::parse_error;
|
||||
}
|
||||
|
@ -732,7 +735,7 @@ class lexer
|
|||
// U+0800..U+0FFF: bytes E0 A0..BF 80..BF
|
||||
case 0xE0:
|
||||
{
|
||||
if (JSON_UNLIKELY(not (next_byte_in_range({0xA0, 0xBF, 0x80, 0xBF}))))
|
||||
if (JSON_HEDLEY_UNLIKELY(not (next_byte_in_range({0xA0, 0xBF, 0x80, 0xBF}))))
|
||||
{
|
||||
return token_type::parse_error;
|
||||
}
|
||||
|
@ -756,7 +759,7 @@ class lexer
|
|||
case 0xEE:
|
||||
case 0xEF:
|
||||
{
|
||||
if (JSON_UNLIKELY(not (next_byte_in_range({0x80, 0xBF, 0x80, 0xBF}))))
|
||||
if (JSON_HEDLEY_UNLIKELY(not (next_byte_in_range({0x80, 0xBF, 0x80, 0xBF}))))
|
||||
{
|
||||
return token_type::parse_error;
|
||||
}
|
||||
|
@ -766,7 +769,7 @@ class lexer
|
|||
// U+D000..U+D7FF: bytes ED 80..9F 80..BF
|
||||
case 0xED:
|
||||
{
|
||||
if (JSON_UNLIKELY(not (next_byte_in_range({0x80, 0x9F, 0x80, 0xBF}))))
|
||||
if (JSON_HEDLEY_UNLIKELY(not (next_byte_in_range({0x80, 0x9F, 0x80, 0xBF}))))
|
||||
{
|
||||
return token_type::parse_error;
|
||||
}
|
||||
|
@ -776,7 +779,7 @@ class lexer
|
|||
// U+10000..U+3FFFF F0 90..BF 80..BF 80..BF
|
||||
case 0xF0:
|
||||
{
|
||||
if (JSON_UNLIKELY(not (next_byte_in_range({0x90, 0xBF, 0x80, 0xBF, 0x80, 0xBF}))))
|
||||
if (JSON_HEDLEY_UNLIKELY(not (next_byte_in_range({0x90, 0xBF, 0x80, 0xBF, 0x80, 0xBF}))))
|
||||
{
|
||||
return token_type::parse_error;
|
||||
}
|
||||
|
@ -788,7 +791,7 @@ class lexer
|
|||
case 0xF2:
|
||||
case 0xF3:
|
||||
{
|
||||
if (JSON_UNLIKELY(not (next_byte_in_range({0x80, 0xBF, 0x80, 0xBF, 0x80, 0xBF}))))
|
||||
if (JSON_HEDLEY_UNLIKELY(not (next_byte_in_range({0x80, 0xBF, 0x80, 0xBF, 0x80, 0xBF}))))
|
||||
{
|
||||
return token_type::parse_error;
|
||||
}
|
||||
|
@ -798,7 +801,7 @@ class lexer
|
|||
// U+100000..U+10FFFF F4 80..8F 80..BF 80..BF
|
||||
case 0xF4:
|
||||
{
|
||||
if (JSON_UNLIKELY(not (next_byte_in_range({0x80, 0x8F, 0x80, 0xBF, 0x80, 0xBF}))))
|
||||
if (JSON_HEDLEY_UNLIKELY(not (next_byte_in_range({0x80, 0x8F, 0x80, 0xBF, 0x80, 0xBF}))))
|
||||
{
|
||||
return token_type::parse_error;
|
||||
}
|
||||
|
@ -815,16 +818,19 @@ class lexer
|
|||
}
|
||||
}
|
||||
|
||||
JSON_HEDLEY_NON_NULL(2)
|
||||
static void strtof(float& f, const char* str, char** endptr) noexcept
|
||||
{
|
||||
f = std::strtof(str, endptr);
|
||||
}
|
||||
|
||||
JSON_HEDLEY_NON_NULL(2)
|
||||
static void strtof(double& f, const char* str, char** endptr) noexcept
|
||||
{
|
||||
f = std::strtod(str, endptr);
|
||||
}
|
||||
|
||||
JSON_HEDLEY_NON_NULL(2)
|
||||
static void strtof(long double& f, const char* str, char** endptr) noexcept
|
||||
{
|
||||
f = std::strtold(str, endptr);
|
||||
|
@ -1200,13 +1206,14 @@ scan_number_done:
|
|||
@param[in] length the length of the passed literal text
|
||||
@param[in] return_type the token type to return on success
|
||||
*/
|
||||
JSON_HEDLEY_NON_NULL(2)
|
||||
token_type scan_literal(const char* literal_text, const std::size_t length,
|
||||
token_type return_type)
|
||||
{
|
||||
assert(current == literal_text[0]);
|
||||
for (std::size_t i = 1; i < length; ++i)
|
||||
{
|
||||
if (JSON_UNLIKELY(get() != literal_text[i]))
|
||||
if (JSON_HEDLEY_UNLIKELY(get() != literal_text[i]))
|
||||
{
|
||||
error_message = "invalid literal";
|
||||
return token_type::parse_error;
|
||||
|
@ -1252,7 +1259,7 @@ scan_number_done:
|
|||
current = ia->get_character();
|
||||
}
|
||||
|
||||
if (JSON_LIKELY(current != std::char_traits<char>::eof()))
|
||||
if (JSON_HEDLEY_LIKELY(current != std::char_traits<char>::eof()))
|
||||
{
|
||||
token_string.push_back(std::char_traits<char>::to_char_type(current));
|
||||
}
|
||||
|
@ -1293,7 +1300,7 @@ scan_number_done:
|
|||
--position.chars_read_current_line;
|
||||
}
|
||||
|
||||
if (JSON_LIKELY(current != std::char_traits<char>::eof()))
|
||||
if (JSON_HEDLEY_LIKELY(current != std::char_traits<char>::eof()))
|
||||
{
|
||||
assert(not token_string.empty());
|
||||
token_string.pop_back();
|
||||
|
@ -1372,6 +1379,7 @@ scan_number_done:
|
|||
}
|
||||
|
||||
/// return syntax error message
|
||||
JSON_HEDLEY_RETURNS_NON_NULL
|
||||
constexpr const char* get_error_message() const noexcept
|
||||
{
|
||||
return error_message;
|
||||
|
|
|
@ -147,6 +147,7 @@ class parser
|
|||
}
|
||||
|
||||
template <typename SAX>
|
||||
JSON_HEDLEY_NON_NULL(2)
|
||||
bool sax_parse(SAX* sax, const bool strict = true)
|
||||
{
|
||||
(void)detail::is_sax_static_asserts<SAX, BasicJsonType> {};
|
||||
|
@ -166,6 +167,7 @@ class parser
|
|||
|
||||
private:
|
||||
template <typename SAX>
|
||||
JSON_HEDLEY_NON_NULL(2)
|
||||
bool sax_parse_internal(SAX* sax)
|
||||
{
|
||||
// stack to remember the hierarchy of structured values we are parsing
|
||||
|
@ -183,7 +185,7 @@ class parser
|
|||
{
|
||||
case token_type::begin_object:
|
||||
{
|
||||
if (JSON_UNLIKELY(not sax->start_object(std::size_t(-1))))
|
||||
if (JSON_HEDLEY_UNLIKELY(not sax->start_object(std::size_t(-1))))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -191,7 +193,7 @@ class parser
|
|||
// closing } -> we are done
|
||||
if (get_token() == token_type::end_object)
|
||||
{
|
||||
if (JSON_UNLIKELY(not sax->end_object()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not sax->end_object()))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -199,20 +201,20 @@ class parser
|
|||
}
|
||||
|
||||
// parse key
|
||||
if (JSON_UNLIKELY(last_token != token_type::value_string))
|
||||
if (JSON_HEDLEY_UNLIKELY(last_token != token_type::value_string))
|
||||
{
|
||||
return sax->parse_error(m_lexer.get_position(),
|
||||
m_lexer.get_token_string(),
|
||||
parse_error::create(101, m_lexer.get_position(),
|
||||
exception_message(token_type::value_string, "object key")));
|
||||
}
|
||||
if (JSON_UNLIKELY(not sax->key(m_lexer.get_string())))
|
||||
if (JSON_HEDLEY_UNLIKELY(not sax->key(m_lexer.get_string())))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
// parse separator (:)
|
||||
if (JSON_UNLIKELY(get_token() != token_type::name_separator))
|
||||
if (JSON_HEDLEY_UNLIKELY(get_token() != token_type::name_separator))
|
||||
{
|
||||
return sax->parse_error(m_lexer.get_position(),
|
||||
m_lexer.get_token_string(),
|
||||
|
@ -230,7 +232,7 @@ class parser
|
|||
|
||||
case token_type::begin_array:
|
||||
{
|
||||
if (JSON_UNLIKELY(not sax->start_array(std::size_t(-1))))
|
||||
if (JSON_HEDLEY_UNLIKELY(not sax->start_array(std::size_t(-1))))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -238,7 +240,7 @@ class parser
|
|||
// closing ] -> we are done
|
||||
if (get_token() == token_type::end_array)
|
||||
{
|
||||
if (JSON_UNLIKELY(not sax->end_array()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not sax->end_array()))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -256,14 +258,14 @@ class parser
|
|||
{
|
||||
const auto res = m_lexer.get_number_float();
|
||||
|
||||
if (JSON_UNLIKELY(not std::isfinite(res)))
|
||||
if (JSON_HEDLEY_UNLIKELY(not std::isfinite(res)))
|
||||
{
|
||||
return sax->parse_error(m_lexer.get_position(),
|
||||
m_lexer.get_token_string(),
|
||||
out_of_range::create(406, "number overflow parsing '" + m_lexer.get_token_string() + "'"));
|
||||
}
|
||||
|
||||
if (JSON_UNLIKELY(not sax->number_float(res, m_lexer.get_string())))
|
||||
if (JSON_HEDLEY_UNLIKELY(not sax->number_float(res, m_lexer.get_string())))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -273,7 +275,7 @@ class parser
|
|||
|
||||
case token_type::literal_false:
|
||||
{
|
||||
if (JSON_UNLIKELY(not sax->boolean(false)))
|
||||
if (JSON_HEDLEY_UNLIKELY(not sax->boolean(false)))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -282,7 +284,7 @@ class parser
|
|||
|
||||
case token_type::literal_null:
|
||||
{
|
||||
if (JSON_UNLIKELY(not sax->null()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not sax->null()))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -291,7 +293,7 @@ class parser
|
|||
|
||||
case token_type::literal_true:
|
||||
{
|
||||
if (JSON_UNLIKELY(not sax->boolean(true)))
|
||||
if (JSON_HEDLEY_UNLIKELY(not sax->boolean(true)))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -300,7 +302,7 @@ class parser
|
|||
|
||||
case token_type::value_integer:
|
||||
{
|
||||
if (JSON_UNLIKELY(not sax->number_integer(m_lexer.get_number_integer())))
|
||||
if (JSON_HEDLEY_UNLIKELY(not sax->number_integer(m_lexer.get_number_integer())))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -309,7 +311,7 @@ class parser
|
|||
|
||||
case token_type::value_string:
|
||||
{
|
||||
if (JSON_UNLIKELY(not sax->string(m_lexer.get_string())))
|
||||
if (JSON_HEDLEY_UNLIKELY(not sax->string(m_lexer.get_string())))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -318,7 +320,7 @@ class parser
|
|||
|
||||
case token_type::value_unsigned:
|
||||
{
|
||||
if (JSON_UNLIKELY(not sax->number_unsigned(m_lexer.get_number_unsigned())))
|
||||
if (JSON_HEDLEY_UNLIKELY(not sax->number_unsigned(m_lexer.get_number_unsigned())))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -366,9 +368,9 @@ class parser
|
|||
}
|
||||
|
||||
// closing ]
|
||||
if (JSON_LIKELY(last_token == token_type::end_array))
|
||||
if (JSON_HEDLEY_LIKELY(last_token == token_type::end_array))
|
||||
{
|
||||
if (JSON_UNLIKELY(not sax->end_array()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not sax->end_array()))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -394,7 +396,7 @@ class parser
|
|||
if (get_token() == token_type::value_separator)
|
||||
{
|
||||
// parse key
|
||||
if (JSON_UNLIKELY(get_token() != token_type::value_string))
|
||||
if (JSON_HEDLEY_UNLIKELY(get_token() != token_type::value_string))
|
||||
{
|
||||
return sax->parse_error(m_lexer.get_position(),
|
||||
m_lexer.get_token_string(),
|
||||
|
@ -402,13 +404,13 @@ class parser
|
|||
exception_message(token_type::value_string, "object key")));
|
||||
}
|
||||
|
||||
if (JSON_UNLIKELY(not sax->key(m_lexer.get_string())))
|
||||
if (JSON_HEDLEY_UNLIKELY(not sax->key(m_lexer.get_string())))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
// parse separator (:)
|
||||
if (JSON_UNLIKELY(get_token() != token_type::name_separator))
|
||||
if (JSON_HEDLEY_UNLIKELY(get_token() != token_type::name_separator))
|
||||
{
|
||||
return sax->parse_error(m_lexer.get_position(),
|
||||
m_lexer.get_token_string(),
|
||||
|
@ -422,9 +424,9 @@ class parser
|
|||
}
|
||||
|
||||
// closing }
|
||||
if (JSON_LIKELY(last_token == token_type::end_object))
|
||||
if (JSON_HEDLEY_LIKELY(last_token == token_type::end_object))
|
||||
{
|
||||
if (JSON_UNLIKELY(not sax->end_object()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not sax->end_object()))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
|
|
@@ -118,17 +118,42 @@ class iter_impl
    to iterator is not defined.
    */

    /*!
    @brief const copy constructor
    @param[in] other const iterator to copy from
    @note This copy constructor had to be defined explicitly to circumvent a bug
          occurring on msvc v19.0 compiler (VS 2015) debug build. For more
          information refer to: https://github.com/nlohmann/json/issues/1608
    */
    iter_impl(const iter_impl<const BasicJsonType>& other) noexcept
        : m_object(other.m_object), m_it(other.m_it)
    {}

    /*!
    @brief converting assignment
    @param[in] other const iterator to copy from
    @return const/non-const iterator
    @note It is not checked whether @a other is initialized.
    */
    iter_impl& operator=(const iter_impl<const BasicJsonType>& other) noexcept
    {
        m_object = other.m_object;
        m_it = other.m_it;
        return *this;
    }

    /*!
    @brief converting constructor
    @param[in] other non-const iterator to copy from
    @note It is not checked whether @a other is initialized.
    */
    iter_impl(const iter_impl<typename std::remove_const<BasicJsonType>::type>& other) noexcept
        : m_object(other.m_object), m_it(other.m_it) {}
        : m_object(other.m_object), m_it(other.m_it)
    {}

    /*!
    @brief converting assignment
    @param[in,out] other non-const iterator to copy from
    @param[in] other non-const iterator to copy from
    @return const/non-const iterator
    @note It is not checked whether @a other is initialized.
    */
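The explicitly defined copy constructor and assignment above target const-iterator copies, which issue #1608 reports as misbehaving on MSVC 19.0 debug builds when the compiler-generated versions are used. A minimal sketch of the pattern that now goes through the new members; the keys and values are made up for illustration:

#include <cassert>
#include <nlohmann/json.hpp>

using json = nlohmann::json;

int main()
{
    const json j = {{"key", 42}};

    // copying a const_iterator uses iter_impl's explicit copy constructor
    json::const_iterator first = j.cbegin();
    json::const_iterator copy  = first;
    assert(copy == first);
    assert(copy.key() == "key");

    // assigning between const_iterators uses the new operator= overload
    json::const_iterator assigned = j.cend();
    assigned = first;
    assert(*assigned == 42);
    return 0;
}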
|
@ -235,7 +260,7 @@ class iter_impl
|
|||
|
||||
default:
|
||||
{
|
||||
if (JSON_LIKELY(m_it.primitive_iterator.is_begin()))
|
||||
if (JSON_HEDLEY_LIKELY(m_it.primitive_iterator.is_begin()))
|
||||
{
|
||||
return *m_object;
|
||||
}
|
||||
|
@ -269,7 +294,7 @@ class iter_impl
|
|||
|
||||
default:
|
||||
{
|
||||
if (JSON_LIKELY(m_it.primitive_iterator.is_begin()))
|
||||
if (JSON_HEDLEY_LIKELY(m_it.primitive_iterator.is_begin()))
|
||||
{
|
||||
return m_object;
|
||||
}
|
||||
|
@ -372,7 +397,7 @@ class iter_impl
|
|||
bool operator==(const iter_impl& other) const
|
||||
{
|
||||
// if objects are not the same, the comparison is undefined
|
||||
if (JSON_UNLIKELY(m_object != other.m_object))
|
||||
if (JSON_HEDLEY_UNLIKELY(m_object != other.m_object))
|
||||
{
|
||||
JSON_THROW(invalid_iterator::create(212, "cannot compare iterators of different containers"));
|
||||
}
|
||||
|
@ -408,7 +433,7 @@ class iter_impl
|
|||
bool operator<(const iter_impl& other) const
|
||||
{
|
||||
// if objects are not the same, the comparison is undefined
|
||||
if (JSON_UNLIKELY(m_object != other.m_object))
|
||||
if (JSON_HEDLEY_UNLIKELY(m_object != other.m_object))
|
||||
{
|
||||
JSON_THROW(invalid_iterator::create(212, "cannot compare iterators of different containers"));
|
||||
}
|
||||
|
@ -568,7 +593,7 @@ class iter_impl
|
|||
|
||||
default:
|
||||
{
|
||||
if (JSON_LIKELY(m_it.primitive_iterator.get_value() == -n))
|
||||
if (JSON_HEDLEY_LIKELY(m_it.primitive_iterator.get_value() == -n))
|
||||
{
|
||||
return *m_object;
|
||||
}
|
||||
|
@ -586,7 +611,7 @@ class iter_impl
|
|||
{
|
||||
assert(m_object != nullptr);
|
||||
|
||||
if (JSON_LIKELY(m_object->is_object()))
|
||||
if (JSON_HEDLEY_LIKELY(m_object->is_object()))
|
||||
{
|
||||
return m_it.object_iterator->first;
|
||||
}
|
||||
|
@ -609,5 +634,5 @@ class iter_impl
|
|||
/// the actual iterator of the associated instance
|
||||
internal_iterator<typename std::remove_const<BasicJsonType>::type> m_it {};
|
||||
};
|
||||
} // namespace detail
|
||||
} // namespace detail
|
||||
} // namespace nlohmann
|
||||
|
|
|
@ -13,15 +13,16 @@ template <typename It, typename = void>
|
|||
struct iterator_types {};
|
||||
|
||||
template <typename It>
|
||||
struct iterator_types<
|
||||
struct iterator_types <
|
||||
It,
|
||||
void_t<typename It::difference_type, typename It::value_type, typename It::pointer,
|
||||
typename It::reference, typename It::iterator_category>> {
|
||||
using difference_type = typename It::difference_type;
|
||||
using value_type = typename It::value_type;
|
||||
using pointer = typename It::pointer;
|
||||
using reference = typename It::reference;
|
||||
using iterator_category = typename It::iterator_category;
|
||||
typename It::reference, typename It::iterator_category >>
|
||||
{
|
||||
using difference_type = typename It::difference_type;
|
||||
using value_type = typename It::value_type;
|
||||
using pointer = typename It::pointer;
|
||||
using reference = typename It::reference;
|
||||
using iterator_category = typename It::iterator_category;
|
||||
};
|
||||
|
||||
// This is required as some compilers implement std::iterator_traits in a way that
|
||||
|
@ -32,18 +33,19 @@ struct iterator_traits
|
|||
};
|
||||
|
||||
template <typename T>
|
||||
struct iterator_traits<T, enable_if_t<!std::is_pointer<T>::value>>
|
||||
: iterator_types<T>
|
||||
struct iterator_traits < T, enable_if_t < !std::is_pointer<T>::value >>
|
||||
: iterator_types<T>
|
||||
{
|
||||
};
|
||||
|
||||
template <typename T>
|
||||
struct iterator_traits<T*, enable_if_t<std::is_object<T>::value>> {
|
||||
using iterator_category = std::random_access_iterator_tag;
|
||||
using value_type = T;
|
||||
using difference_type = ptrdiff_t;
|
||||
using pointer = T*;
|
||||
using reference = T&;
|
||||
struct iterator_traits<T*, enable_if_t<std::is_object<T>::value>>
|
||||
{
|
||||
using iterator_category = std::random_access_iterator_tag;
|
||||
using value_type = T;
|
||||
using difference_type = ptrdiff_t;
|
||||
using pointer = T*;
|
||||
using reference = T&;
|
||||
};
|
||||
} // namespace detail
|
||||
} // namespace nlohmann
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
|
||||
#include <algorithm> // all_of
|
||||
#include <cassert> // assert
|
||||
#include <cctype> // isdigit
|
||||
#include <numeric> // accumulate
|
||||
#include <string> // string
|
||||
#include <utility> // move
|
||||
|
@ -244,7 +245,7 @@ class json_pointer
|
|||
*/
|
||||
void pop_back()
|
||||
{
|
||||
if (JSON_UNLIKELY(empty()))
|
||||
if (JSON_HEDLEY_UNLIKELY(empty()))
|
||||
{
|
||||
JSON_THROW(detail::out_of_range::create(405, "JSON pointer has no parent"));
|
||||
}
|
||||
|
@ -268,7 +269,7 @@ class json_pointer
|
|||
*/
|
||||
const std::string& back()
|
||||
{
|
||||
if (JSON_UNLIKELY(empty()))
|
||||
if (JSON_HEDLEY_UNLIKELY(empty()))
|
||||
{
|
||||
JSON_THROW(detail::out_of_range::create(405, "JSON pointer has no parent"));
|
||||
}
|
||||
|
@ -332,7 +333,7 @@ class json_pointer
|
|||
const int res = std::stoi(s, &processed_chars);
|
||||
|
||||
// check if the string was completely read
|
||||
if (JSON_UNLIKELY(processed_chars != s.size()))
|
||||
if (JSON_HEDLEY_UNLIKELY(processed_chars != s.size()))
|
||||
{
|
||||
JSON_THROW(detail::out_of_range::create(404, "unresolved reference token '" + s + "'"));
|
||||
}
|
||||
|
@ -342,7 +343,7 @@ class json_pointer
|
|||
|
||||
json_pointer top() const
|
||||
{
|
||||
if (JSON_UNLIKELY(empty()))
|
||||
if (JSON_HEDLEY_UNLIKELY(empty()))
|
||||
{
|
||||
JSON_THROW(detail::out_of_range::create(405, "JSON pointer has no parent"));
|
||||
}
|
||||
|
@ -369,7 +370,7 @@ class json_pointer
|
|||
// j which will be overwritten by a primitive value
|
||||
for (const auto& reference_token : reference_tokens)
|
||||
{
|
||||
switch (result->m_type)
|
||||
switch (result->type())
|
||||
{
|
||||
case detail::value_t::null:
|
||||
{
|
||||
|
@ -446,14 +447,14 @@ class json_pointer
|
|||
for (const auto& reference_token : reference_tokens)
|
||||
{
|
||||
// convert null values to arrays or objects before continuing
|
||||
if (ptr->m_type == detail::value_t::null)
|
||||
if (ptr->is_null())
|
||||
{
|
||||
// check if reference token is a number
|
||||
const bool nums =
|
||||
std::all_of(reference_token.begin(), reference_token.end(),
|
||||
[](const char x)
|
||||
[](const unsigned char x)
|
||||
{
|
||||
return x >= '0' and x <= '9';
|
||||
return std::isdigit(x);
|
||||
});
|
||||
|
||||
// change value to array for numbers or "-" or to object otherwise
|
||||
|
@ -462,7 +463,7 @@ class json_pointer
|
|||
: detail::value_t::object;
|
||||
}
|
||||
|
||||
switch (ptr->m_type)
|
||||
switch (ptr->type())
|
||||
{
|
||||
case detail::value_t::object:
|
||||
{
|
||||
|
@ -474,7 +475,7 @@ class json_pointer
|
|||
case detail::value_t::array:
|
||||
{
|
||||
// error condition (cf. RFC 6901, Sect. 4)
|
||||
if (JSON_UNLIKELY(reference_token.size() > 1 and reference_token[0] == '0'))
|
||||
if (JSON_HEDLEY_UNLIKELY(reference_token.size() > 1 and reference_token[0] == '0'))
|
||||
{
|
||||
JSON_THROW(detail::parse_error::create(106, 0,
|
||||
"array index '" + reference_token +
|
||||
|
@ -521,7 +522,7 @@ class json_pointer
|
|||
using size_type = typename BasicJsonType::size_type;
|
||||
for (const auto& reference_token : reference_tokens)
|
||||
{
|
||||
switch (ptr->m_type)
|
||||
switch (ptr->type())
|
||||
{
|
||||
case detail::value_t::object:
|
||||
{
|
||||
|
@ -532,7 +533,7 @@ class json_pointer
|
|||
|
||||
case detail::value_t::array:
|
||||
{
|
||||
if (JSON_UNLIKELY(reference_token == "-"))
|
||||
if (JSON_HEDLEY_UNLIKELY(reference_token == "-"))
|
||||
{
|
||||
// "-" always fails the range check
|
||||
JSON_THROW(detail::out_of_range::create(402,
|
||||
|
@ -541,7 +542,7 @@ class json_pointer
|
|||
}
|
||||
|
||||
// error condition (cf. RFC 6901, Sect. 4)
|
||||
if (JSON_UNLIKELY(reference_token.size() > 1 and reference_token[0] == '0'))
|
||||
if (JSON_HEDLEY_UNLIKELY(reference_token.size() > 1 and reference_token[0] == '0'))
|
||||
{
|
||||
JSON_THROW(detail::parse_error::create(106, 0,
|
||||
"array index '" + reference_token +
|
||||
|
@ -586,7 +587,7 @@ class json_pointer
|
|||
using size_type = typename BasicJsonType::size_type;
|
||||
for (const auto& reference_token : reference_tokens)
|
||||
{
|
||||
switch (ptr->m_type)
|
||||
switch (ptr->type())
|
||||
{
|
||||
case detail::value_t::object:
|
||||
{
|
||||
|
@ -597,7 +598,7 @@ class json_pointer
|
|||
|
||||
case detail::value_t::array:
|
||||
{
|
||||
if (JSON_UNLIKELY(reference_token == "-"))
|
||||
if (JSON_HEDLEY_UNLIKELY(reference_token == "-"))
|
||||
{
|
||||
// "-" cannot be used for const access
|
||||
JSON_THROW(detail::out_of_range::create(402,
|
||||
|
@ -606,7 +607,7 @@ class json_pointer
|
|||
}
|
||||
|
||||
// error condition (cf. RFC 6901, Sect. 4)
|
||||
if (JSON_UNLIKELY(reference_token.size() > 1 and reference_token[0] == '0'))
|
||||
if (JSON_HEDLEY_UNLIKELY(reference_token.size() > 1 and reference_token[0] == '0'))
|
||||
{
|
||||
JSON_THROW(detail::parse_error::create(106, 0,
|
||||
"array index '" + reference_token +
|
||||
|
@ -645,7 +646,7 @@ class json_pointer
|
|||
using size_type = typename BasicJsonType::size_type;
|
||||
for (const auto& reference_token : reference_tokens)
|
||||
{
|
||||
switch (ptr->m_type)
|
||||
switch (ptr->type())
|
||||
{
|
||||
case detail::value_t::object:
|
||||
{
|
||||
|
@ -656,7 +657,7 @@ class json_pointer
|
|||
|
||||
case detail::value_t::array:
|
||||
{
|
||||
if (JSON_UNLIKELY(reference_token == "-"))
|
||||
if (JSON_HEDLEY_UNLIKELY(reference_token == "-"))
|
||||
{
|
||||
// "-" always fails the range check
|
||||
JSON_THROW(detail::out_of_range::create(402,
|
||||
|
@ -665,7 +666,7 @@ class json_pointer
|
|||
}
|
||||
|
||||
// error condition (cf. RFC 6901, Sect. 4)
|
||||
if (JSON_UNLIKELY(reference_token.size() > 1 and reference_token[0] == '0'))
|
||||
if (JSON_HEDLEY_UNLIKELY(reference_token.size() > 1 and reference_token[0] == '0'))
|
||||
{
|
||||
JSON_THROW(detail::parse_error::create(106, 0,
|
||||
"array index '" + reference_token +
|
||||
|
@ -692,6 +693,77 @@ class json_pointer
|
|||
return *ptr;
|
||||
}
|
||||
|
||||
/*!
|
||||
@throw parse_error.106 if an array index begins with '0'
|
||||
@throw parse_error.109 if an array index was not a number
|
||||
*/
|
||||
bool contains(const BasicJsonType* ptr) const
|
||||
{
|
||||
using size_type = typename BasicJsonType::size_type;
|
||||
for (const auto& reference_token : reference_tokens)
|
||||
{
|
||||
switch (ptr->type())
|
||||
{
|
||||
case detail::value_t::object:
|
||||
{
|
||||
if (not ptr->contains(reference_token))
|
||||
{
|
||||
// we did not find the key in the object
|
||||
return false;
|
||||
}
|
||||
|
||||
ptr = &ptr->operator[](reference_token);
|
||||
break;
|
||||
}
|
||||
|
||||
case detail::value_t::array:
|
||||
{
|
||||
if (JSON_HEDLEY_UNLIKELY(reference_token == "-"))
|
||||
{
|
||||
// "-" always fails the range check
|
||||
return false;
|
||||
}
|
||||
|
||||
// error condition (cf. RFC 6901, Sect. 4)
|
||||
if (JSON_HEDLEY_UNLIKELY(reference_token.size() > 1 and reference_token[0] == '0'))
|
||||
{
|
||||
JSON_THROW(detail::parse_error::create(106, 0,
|
||||
"array index '" + reference_token +
|
||||
"' must not begin with '0'"));
|
||||
}
|
||||
|
||||
JSON_TRY
|
||||
{
|
||||
const auto idx = static_cast<size_type>(array_index(reference_token));
|
||||
if (idx >= ptr->size())
|
||||
{
|
||||
// index out of range
|
||||
return false;
|
||||
}
|
||||
|
||||
ptr = &ptr->operator[](idx);
|
||||
break;
|
||||
}
|
||||
JSON_CATCH(std::invalid_argument&)
|
||||
{
|
||||
JSON_THROW(detail::parse_error::create(109, 0, "array index '" + reference_token + "' is not a number"));
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
{
|
||||
// we do not expect primitive values if there is still a
|
||||
// reference token to process
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// no reference token left means we found a primitive value
|
||||
return true;
|
||||
}
|
||||
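The contains() walk added above resolves a JSON pointer token by token and returns false instead of throwing when a key or index is missing. A rough usage sketch through the basic_json-level overload that this release wires up; the keys and pointer strings are invented for illustration:

#include <cassert>
#include <nlohmann/json.hpp>

using json = nlohmann::json;

int main()
{
    const json j = {{"names", {"Ada", "Grace"}}, {"answer", {{"everything", 42}}}};

    // object keys and array indices are checked one reference token at a time,
    // mirroring the reference_tokens loop above
    assert(j.contains(json::json_pointer("/answer/everything")));
    assert(j.contains(json::json_pointer("/names/1")));
    assert(not j.contains(json::json_pointer("/names/7")));      // index out of range
    assert(not j.contains(json::json_pointer("/no/such/path"))); // missing key
    return 0;
}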
|
||||
/*!
|
||||
@brief split the string input to reference tokens
|
||||
|
||||
|
@ -712,7 +784,7 @@ class json_pointer
|
|||
}
|
||||
|
||||
// check if nonempty reference string begins with slash
|
||||
if (JSON_UNLIKELY(reference_string[0] != '/'))
|
||||
if (JSON_HEDLEY_UNLIKELY(reference_string[0] != '/'))
|
||||
{
|
||||
JSON_THROW(detail::parse_error::create(107, 1,
|
||||
"JSON pointer must be empty or begin with '/' - was: '" +
|
||||
|
@ -747,9 +819,9 @@ class json_pointer
|
|||
assert(reference_token[pos] == '~');
|
||||
|
||||
// ~ must be followed by 0 or 1
|
||||
if (JSON_UNLIKELY(pos == reference_token.size() - 1 or
|
||||
(reference_token[pos + 1] != '0' and
|
||||
reference_token[pos + 1] != '1')))
|
||||
if (JSON_HEDLEY_UNLIKELY(pos == reference_token.size() - 1 or
|
||||
(reference_token[pos + 1] != '0' and
|
||||
reference_token[pos + 1] != '1')))
|
||||
{
|
||||
JSON_THROW(detail::parse_error::create(108, 0, "escape character '~' must be followed with '0' or '1'"));
|
||||
}
|
||||
|
@ -813,7 +885,7 @@ class json_pointer
|
|||
const BasicJsonType& value,
|
||||
BasicJsonType& result)
|
||||
{
|
||||
switch (value.m_type)
|
||||
switch (value.type())
|
||||
{
|
||||
case detail::value_t::array:
|
||||
{
|
||||
|
@ -874,7 +946,7 @@ class json_pointer
|
|||
static BasicJsonType
|
||||
unflatten(const BasicJsonType& value)
|
||||
{
|
||||
if (JSON_UNLIKELY(not value.is_object()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not value.is_object()))
|
||||
{
|
||||
JSON_THROW(detail::type_error::create(314, "only objects can be unflattened"));
|
||||
}
|
||||
|
@ -884,7 +956,7 @@ class json_pointer
|
|||
// iterate the JSON object values
|
||||
for (const auto& element : *value.m_value.object)
|
||||
{
|
||||
if (JSON_UNLIKELY(not element.second.is_primitive()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not element.second.is_primitive()))
|
||||
{
|
||||
JSON_THROW(detail::type_error::create(315, "values in object must be primitive"));
|
||||
}
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
#pragma once
|
||||
|
||||
#include <utility> // pair
|
||||
#include <nlohmann/thirdparty/hedley/hedley.hpp>
|
||||
|
||||
// This file contains all internal macro definitions
|
||||
// You MUST include macro_unscope.hpp at the end of json.hpp to undef all of them
|
||||
|
@ -18,6 +19,14 @@
|
|||
#endif
|
||||
#endif
|
||||
|
||||
// C++ language standard detection
|
||||
#if (defined(__cplusplus) && __cplusplus >= 201703L) || (defined(_HAS_CXX17) && _HAS_CXX17 == 1) // fix for issue #464
|
||||
#define JSON_HAS_CPP_17
|
||||
#define JSON_HAS_CPP_14
|
||||
#elif (defined(__cplusplus) && __cplusplus >= 201402L) || (defined(_HAS_CXX14) && _HAS_CXX14 == 1)
|
||||
#define JSON_HAS_CPP_14
|
||||
#endif
|
||||
|
||||
// disable float-equal warnings on GCC/clang
|
||||
#if defined(__clang__) || defined(__GNUC__) || defined(__GNUG__)
|
||||
#pragma GCC diagnostic push
|
||||
|
@ -30,28 +39,6 @@
|
|||
#pragma GCC diagnostic ignored "-Wdocumentation"
|
||||
#endif
|
||||
|
||||
// allow for portable deprecation warnings
|
||||
#if defined(__clang__) || defined(__GNUC__) || defined(__GNUG__)
|
||||
#define JSON_DEPRECATED __attribute__((deprecated))
|
||||
#elif defined(_MSC_VER)
|
||||
#define JSON_DEPRECATED __declspec(deprecated)
|
||||
#else
|
||||
#define JSON_DEPRECATED
|
||||
#endif
|
||||
|
||||
// allow for portable nodiscard warnings
|
||||
#if defined(__has_cpp_attribute)
|
||||
#if __has_cpp_attribute(nodiscard)
|
||||
#define JSON_NODISCARD [[nodiscard]]
|
||||
#elif __has_cpp_attribute(gnu::warn_unused_result)
|
||||
#define JSON_NODISCARD [[gnu::warn_unused_result]]
|
||||
#else
|
||||
#define JSON_NODISCARD
|
||||
#endif
|
||||
#else
|
||||
#define JSON_NODISCARD
|
||||
#endif
|
||||
|
||||
// allow to disable exceptions
|
||||
#if (defined(__cpp_exceptions) || defined(__EXCEPTIONS) || defined(_CPPUNWIND)) && !defined(JSON_NOEXCEPTION)
|
||||
#define JSON_THROW(exception) throw exception
|
||||
|
@@ -86,23 +73,6 @@
#define JSON_INTERNAL_CATCH JSON_INTERNAL_CATCH_USER
#endif

// manual branch prediction
#if defined(__clang__) || defined(__GNUC__) || defined(__GNUG__)
    #define JSON_LIKELY(x) __builtin_expect(x, 1)
    #define JSON_UNLIKELY(x) __builtin_expect(x, 0)
#else
    #define JSON_LIKELY(x) x
    #define JSON_UNLIKELY(x) x
#endif

// C++ language standard detection
#if (defined(__cplusplus) && __cplusplus >= 201703L) || (defined(_HAS_CXX17) && _HAS_CXX17 == 1) // fix for issue #464
    #define JSON_HAS_CPP_17
    #define JSON_HAS_CPP_14
#elif (defined(__cplusplus) && __cplusplus >= 201402L) || (defined(_HAS_CXX14) && _HAS_CXX14 == 1)
    #define JSON_HAS_CPP_14
#endif

/*!
@brief macro to briefly define a mapping between an enum and JSON
@def NLOHMANN_JSON_SERIALIZE_ENUM
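The JSON_LIKELY/JSON_UNLIKELY definitions removed above are replaced throughout this diff by Hedley's portable JSON_HEDLEY_LIKELY/JSON_HEDLEY_UNLIKELY. A standalone sketch of the same branch-hint idea, assuming a GCC/Clang-style __builtin_expect; the MY_* names and checked_get function are chosen only for illustration:

#include <cstddef>

// branch hints in the spirit of the removed macros; Hedley's equivalents
// expand to something comparable on supported compilers and to a no-op elsewhere
#if defined(__clang__) || defined(__GNUC__)
    #define MY_LIKELY(x)   __builtin_expect(!!(x), 1)
    #define MY_UNLIKELY(x) __builtin_expect(!!(x), 0)
#else
    #define MY_LIKELY(x)   (x)
    #define MY_UNLIKELY(x) (x)
#endif

// example: hint that the error path is the rare one
int checked_get(const int* data, std::size_t size, std::size_t idx)
{
    if (MY_UNLIKELY(data == nullptr || idx >= size))
    {
        return -1;
    }
    return data[idx];
}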
|
|
@ -13,11 +13,9 @@
|
|||
#undef JSON_CATCH
|
||||
#undef JSON_THROW
|
||||
#undef JSON_TRY
|
||||
#undef JSON_LIKELY
|
||||
#undef JSON_UNLIKELY
|
||||
#undef JSON_DEPRECATED
|
||||
#undef JSON_NODISCARD
|
||||
#undef JSON_HAS_CPP_14
|
||||
#undef JSON_HAS_CPP_17
|
||||
#undef NLOHMANN_BASIC_JSON_TPL_DECLARATION
|
||||
#undef NLOHMANN_BASIC_JSON_TPL
|
||||
|
||||
#include <nlohmann/thirdparty/hedley/hedley_undef.hpp>
|
||||
|
|
|
@ -24,119 +24,119 @@ using number_integer_function_t =
|
|||
|
||||
template <typename T, typename Unsigned>
|
||||
using number_unsigned_function_t =
|
||||
decltype(std::declval<T &>().number_unsigned(std::declval<Unsigned>()));
|
||||
decltype(std::declval<T&>().number_unsigned(std::declval<Unsigned>()));
|
||||
|
||||
template <typename T, typename Float, typename String>
|
||||
using number_float_function_t = decltype(std::declval<T &>().number_float(
|
||||
std::declval<Float>(), std::declval<const String &>()));
|
||||
using number_float_function_t = decltype(std::declval<T&>().number_float(
|
||||
std::declval<Float>(), std::declval<const String&>()));
|
||||
|
||||
template <typename T, typename String>
|
||||
using string_function_t =
|
||||
decltype(std::declval<T &>().string(std::declval<String &>()));
|
||||
decltype(std::declval<T&>().string(std::declval<String&>()));
|
||||
|
||||
template <typename T>
|
||||
using start_object_function_t =
|
||||
decltype(std::declval<T &>().start_object(std::declval<std::size_t>()));
|
||||
decltype(std::declval<T&>().start_object(std::declval<std::size_t>()));
|
||||
|
||||
template <typename T, typename String>
|
||||
using key_function_t =
|
||||
decltype(std::declval<T &>().key(std::declval<String &>()));
|
||||
decltype(std::declval<T&>().key(std::declval<String&>()));
|
||||
|
||||
template <typename T>
|
||||
using end_object_function_t = decltype(std::declval<T &>().end_object());
|
||||
using end_object_function_t = decltype(std::declval<T&>().end_object());
|
||||
|
||||
template <typename T>
|
||||
using start_array_function_t =
|
||||
decltype(std::declval<T &>().start_array(std::declval<std::size_t>()));
|
||||
decltype(std::declval<T&>().start_array(std::declval<std::size_t>()));
|
||||
|
||||
template <typename T>
|
||||
using end_array_function_t = decltype(std::declval<T &>().end_array());
|
||||
using end_array_function_t = decltype(std::declval<T&>().end_array());
|
||||
|
||||
template <typename T, typename Exception>
|
||||
using parse_error_function_t = decltype(std::declval<T &>().parse_error(
|
||||
std::declval<std::size_t>(), std::declval<const std::string &>(),
|
||||
std::declval<const Exception &>()));
|
||||
using parse_error_function_t = decltype(std::declval<T&>().parse_error(
|
||||
std::declval<std::size_t>(), std::declval<const std::string&>(),
|
||||
std::declval<const Exception&>()));
|
||||
|
||||
template <typename SAX, typename BasicJsonType>
|
||||
struct is_sax
|
||||
{
|
||||
private:
|
||||
static_assert(is_basic_json<BasicJsonType>::value,
|
||||
"BasicJsonType must be of type basic_json<...>");
|
||||
private:
|
||||
static_assert(is_basic_json<BasicJsonType>::value,
|
||||
"BasicJsonType must be of type basic_json<...>");
|
||||
|
||||
using number_integer_t = typename BasicJsonType::number_integer_t;
|
||||
using number_unsigned_t = typename BasicJsonType::number_unsigned_t;
|
||||
using number_float_t = typename BasicJsonType::number_float_t;
|
||||
using string_t = typename BasicJsonType::string_t;
|
||||
using exception_t = typename BasicJsonType::exception;
|
||||
using number_integer_t = typename BasicJsonType::number_integer_t;
|
||||
using number_unsigned_t = typename BasicJsonType::number_unsigned_t;
|
||||
using number_float_t = typename BasicJsonType::number_float_t;
|
||||
using string_t = typename BasicJsonType::string_t;
|
||||
using exception_t = typename BasicJsonType::exception;
|
||||
|
||||
public:
|
||||
static constexpr bool value =
|
||||
is_detected_exact<bool, null_function_t, SAX>::value &&
|
||||
is_detected_exact<bool, boolean_function_t, SAX>::value &&
|
||||
is_detected_exact<bool, number_integer_function_t, SAX,
|
||||
number_integer_t>::value &&
|
||||
is_detected_exact<bool, number_unsigned_function_t, SAX,
|
||||
number_unsigned_t>::value &&
|
||||
is_detected_exact<bool, number_float_function_t, SAX, number_float_t,
|
||||
string_t>::value &&
|
||||
is_detected_exact<bool, string_function_t, SAX, string_t>::value &&
|
||||
is_detected_exact<bool, start_object_function_t, SAX>::value &&
|
||||
is_detected_exact<bool, key_function_t, SAX, string_t>::value &&
|
||||
is_detected_exact<bool, end_object_function_t, SAX>::value &&
|
||||
is_detected_exact<bool, start_array_function_t, SAX>::value &&
|
||||
is_detected_exact<bool, end_array_function_t, SAX>::value &&
|
||||
is_detected_exact<bool, parse_error_function_t, SAX, exception_t>::value;
|
||||
public:
|
||||
static constexpr bool value =
|
||||
is_detected_exact<bool, null_function_t, SAX>::value &&
|
||||
is_detected_exact<bool, boolean_function_t, SAX>::value &&
|
||||
is_detected_exact<bool, number_integer_function_t, SAX,
|
||||
number_integer_t>::value &&
|
||||
is_detected_exact<bool, number_unsigned_function_t, SAX,
|
||||
number_unsigned_t>::value &&
|
||||
is_detected_exact<bool, number_float_function_t, SAX, number_float_t,
|
||||
string_t>::value &&
|
||||
is_detected_exact<bool, string_function_t, SAX, string_t>::value &&
|
||||
is_detected_exact<bool, start_object_function_t, SAX>::value &&
|
||||
is_detected_exact<bool, key_function_t, SAX, string_t>::value &&
|
||||
is_detected_exact<bool, end_object_function_t, SAX>::value &&
|
||||
is_detected_exact<bool, start_array_function_t, SAX>::value &&
|
||||
is_detected_exact<bool, end_array_function_t, SAX>::value &&
|
||||
is_detected_exact<bool, parse_error_function_t, SAX, exception_t>::value;
|
||||
};
|
||||
|
||||
template <typename SAX, typename BasicJsonType>
|
||||
struct is_sax_static_asserts
|
||||
{
|
||||
private:
|
||||
static_assert(is_basic_json<BasicJsonType>::value,
|
||||
"BasicJsonType must be of type basic_json<...>");
|
||||
private:
|
||||
static_assert(is_basic_json<BasicJsonType>::value,
|
||||
"BasicJsonType must be of type basic_json<...>");
|
||||
|
||||
using number_integer_t = typename BasicJsonType::number_integer_t;
|
||||
using number_unsigned_t = typename BasicJsonType::number_unsigned_t;
|
||||
using number_float_t = typename BasicJsonType::number_float_t;
|
||||
using string_t = typename BasicJsonType::string_t;
|
||||
using exception_t = typename BasicJsonType::exception;
|
||||
using number_integer_t = typename BasicJsonType::number_integer_t;
|
||||
using number_unsigned_t = typename BasicJsonType::number_unsigned_t;
|
||||
using number_float_t = typename BasicJsonType::number_float_t;
|
||||
using string_t = typename BasicJsonType::string_t;
|
||||
using exception_t = typename BasicJsonType::exception;
|
||||
|
||||
public:
|
||||
static_assert(is_detected_exact<bool, null_function_t, SAX>::value,
|
||||
"Missing/invalid function: bool null()");
|
||||
static_assert(is_detected_exact<bool, boolean_function_t, SAX>::value,
|
||||
"Missing/invalid function: bool boolean(bool)");
|
||||
static_assert(is_detected_exact<bool, boolean_function_t, SAX>::value,
|
||||
"Missing/invalid function: bool boolean(bool)");
|
||||
static_assert(
|
||||
is_detected_exact<bool, number_integer_function_t, SAX,
|
||||
number_integer_t>::value,
|
||||
"Missing/invalid function: bool number_integer(number_integer_t)");
|
||||
static_assert(
|
||||
is_detected_exact<bool, number_unsigned_function_t, SAX,
|
||||
number_unsigned_t>::value,
|
||||
"Missing/invalid function: bool number_unsigned(number_unsigned_t)");
|
||||
static_assert(is_detected_exact<bool, number_float_function_t, SAX,
|
||||
number_float_t, string_t>::value,
|
||||
"Missing/invalid function: bool number_float(number_float_t, const string_t&)");
|
||||
static_assert(
|
||||
is_detected_exact<bool, string_function_t, SAX, string_t>::value,
|
||||
"Missing/invalid function: bool string(string_t&)");
|
||||
static_assert(is_detected_exact<bool, start_object_function_t, SAX>::value,
|
||||
"Missing/invalid function: bool start_object(std::size_t)");
|
||||
static_assert(is_detected_exact<bool, key_function_t, SAX, string_t>::value,
|
||||
"Missing/invalid function: bool key(string_t&)");
|
||||
static_assert(is_detected_exact<bool, end_object_function_t, SAX>::value,
|
||||
"Missing/invalid function: bool end_object()");
|
||||
static_assert(is_detected_exact<bool, start_array_function_t, SAX>::value,
|
||||
"Missing/invalid function: bool start_array(std::size_t)");
|
||||
static_assert(is_detected_exact<bool, end_array_function_t, SAX>::value,
|
||||
"Missing/invalid function: bool end_array()");
|
||||
static_assert(
|
||||
is_detected_exact<bool, parse_error_function_t, SAX, exception_t>::value,
|
||||
"Missing/invalid function: bool parse_error(std::size_t, const "
|
||||
"std::string&, const exception&)");
|
||||
public:
|
||||
static_assert(is_detected_exact<bool, null_function_t, SAX>::value,
|
||||
"Missing/invalid function: bool null()");
|
||||
static_assert(is_detected_exact<bool, boolean_function_t, SAX>::value,
|
||||
"Missing/invalid function: bool boolean(bool)");
|
||||
static_assert(is_detected_exact<bool, boolean_function_t, SAX>::value,
|
||||
"Missing/invalid function: bool boolean(bool)");
|
||||
static_assert(
|
||||
is_detected_exact<bool, number_integer_function_t, SAX,
|
||||
number_integer_t>::value,
|
||||
"Missing/invalid function: bool number_integer(number_integer_t)");
|
||||
static_assert(
|
||||
is_detected_exact<bool, number_unsigned_function_t, SAX,
|
||||
number_unsigned_t>::value,
|
||||
"Missing/invalid function: bool number_unsigned(number_unsigned_t)");
|
||||
static_assert(is_detected_exact<bool, number_float_function_t, SAX,
|
||||
number_float_t, string_t>::value,
|
||||
"Missing/invalid function: bool number_float(number_float_t, const string_t&)");
|
||||
static_assert(
|
||||
is_detected_exact<bool, string_function_t, SAX, string_t>::value,
|
||||
"Missing/invalid function: bool string(string_t&)");
|
||||
static_assert(is_detected_exact<bool, start_object_function_t, SAX>::value,
|
||||
"Missing/invalid function: bool start_object(std::size_t)");
|
||||
static_assert(is_detected_exact<bool, key_function_t, SAX, string_t>::value,
|
||||
"Missing/invalid function: bool key(string_t&)");
|
||||
static_assert(is_detected_exact<bool, end_object_function_t, SAX>::value,
|
||||
"Missing/invalid function: bool end_object()");
|
||||
static_assert(is_detected_exact<bool, start_array_function_t, SAX>::value,
|
||||
"Missing/invalid function: bool start_array(std::size_t)");
|
||||
static_assert(is_detected_exact<bool, end_array_function_t, SAX>::value,
|
||||
"Missing/invalid function: bool end_array()");
|
||||
static_assert(
|
||||
is_detected_exact<bool, parse_error_function_t, SAX, exception_t>::value,
|
||||
"Missing/invalid function: bool parse_error(std::size_t, const "
|
||||
"std::string&, const exception&)");
|
||||
};
|
||||
} // namespace detail
|
||||
} // namespace nlohmann
|
||||
|
|
|
@ -192,10 +192,19 @@ struct is_constructible_object_type_impl <
|
|||
using object_t = typename BasicJsonType::object_t;
|
||||
|
||||
static constexpr bool value =
|
||||
(std::is_constructible<typename ConstructibleObjectType::key_type, typename object_t::key_type>::value and
|
||||
std::is_same<typename object_t::mapped_type, typename ConstructibleObjectType::mapped_type>::value) or
|
||||
(has_from_json<BasicJsonType, typename ConstructibleObjectType::mapped_type>::value or
|
||||
has_non_default_from_json<BasicJsonType, typename ConstructibleObjectType::mapped_type >::value);
|
||||
(std::is_default_constructible<ConstructibleObjectType>::value and
|
||||
(std::is_move_assignable<ConstructibleObjectType>::value or
|
||||
std::is_copy_assignable<ConstructibleObjectType>::value) and
|
||||
(std::is_constructible<typename ConstructibleObjectType::key_type,
|
||||
typename object_t::key_type>::value and
|
||||
std::is_same <
|
||||
typename object_t::mapped_type,
|
||||
typename ConstructibleObjectType::mapped_type >::value)) or
|
||||
(has_from_json<BasicJsonType,
|
||||
typename ConstructibleObjectType::mapped_type>::value or
|
||||
has_non_default_from_json <
|
||||
BasicJsonType,
|
||||
typename ConstructibleObjectType::mapped_type >::value);
|
||||
};
|
||||
|
||||
template <typename BasicJsonType, typename ConstructibleObjectType>
|
||||
|
@ -278,20 +287,24 @@ struct is_constructible_array_type_impl <
|
|||
BasicJsonType, ConstructibleArrayType,
|
||||
enable_if_t<not std::is_same<ConstructibleArrayType,
|
||||
typename BasicJsonType::value_type>::value and
|
||||
is_detected<value_type_t, ConstructibleArrayType>::value and
|
||||
is_detected<iterator_t, ConstructibleArrayType>::value and
|
||||
is_complete_type<
|
||||
detected_t<value_type_t, ConstructibleArrayType>>::value >>
|
||||
std::is_default_constructible<ConstructibleArrayType>::value and
|
||||
(std::is_move_assignable<ConstructibleArrayType>::value or
|
||||
std::is_copy_assignable<ConstructibleArrayType>::value) and
|
||||
is_detected<value_type_t, ConstructibleArrayType>::value and
|
||||
is_detected<iterator_t, ConstructibleArrayType>::value and
|
||||
is_complete_type<
|
||||
detected_t<value_type_t, ConstructibleArrayType>>::value >>
|
||||
{
|
||||
static constexpr bool value =
|
||||
// This is needed because json_reverse_iterator has a ::iterator type,
|
||||
// furthermore, std::back_insert_iterator (and other iterators) have a base class `iterator`...
|
||||
// Therefore it is detected as a ConstructibleArrayType.
|
||||
// The real fix would be to have an Iterable concept.
|
||||
not is_iterator_traits <
|
||||
iterator_traits<ConstructibleArrayType >>::value and
|
||||
// furthermore, std::back_insert_iterator (and other iterators) have a
|
||||
// base class `iterator`... Therefore it is detected as a
|
||||
// ConstructibleArrayType. The real fix would be to have an Iterable
|
||||
// concept.
|
||||
not is_iterator_traits<iterator_traits<ConstructibleArrayType>>::value and
|
||||
|
||||
(std::is_same<typename ConstructibleArrayType::value_type, typename BasicJsonType::array_t::value_type>::value or
|
||||
(std::is_same<typename ConstructibleArrayType::value_type,
|
||||
typename BasicJsonType::array_t::value_type>::value or
|
||||
has_from_json<BasicJsonType,
|
||||
typename ConstructibleArrayType::value_type>::value or
|
||||
has_non_default_from_json <
|
||||
|
|
|
@ -8,6 +8,7 @@
|
|||
#include <string> // string
|
||||
|
||||
#include <nlohmann/detail/input/binary_reader.hpp>
|
||||
#include <nlohmann/detail/macro_scope.hpp>
|
||||
#include <nlohmann/detail/output/output_adapters.hpp>
|
||||
|
||||
namespace nlohmann
|
||||
|
@ -714,7 +715,7 @@ class binary_writer
|
|||
static std::size_t calc_bson_entry_header_size(const string_t& name)
|
||||
{
|
||||
const auto it = name.find(static_cast<typename string_t::value_type>(0));
|
||||
if (JSON_UNLIKELY(it != BasicJsonType::string_t::npos))
|
||||
if (JSON_HEDLEY_UNLIKELY(it != BasicJsonType::string_t::npos))
|
||||
{
|
||||
JSON_THROW(out_of_range::create(409,
|
||||
"BSON key cannot contain code point U+0000 (at byte " + std::to_string(it) + ")"));
|
||||
|
@ -1204,19 +1205,19 @@ class binary_writer
|
|||
|
||||
case value_t::number_unsigned:
|
||||
{
|
||||
if (j.m_value.number_unsigned <= (std::numeric_limits<std::int8_t>::max)())
|
||||
if (j.m_value.number_unsigned <= static_cast<std::uint64_t>((std::numeric_limits<std::int8_t>::max)()))
|
||||
{
|
||||
return 'i';
|
||||
}
|
||||
if (j.m_value.number_unsigned <= (std::numeric_limits<std::uint8_t>::max)())
|
||||
if (j.m_value.number_unsigned <= static_cast<std::uint64_t>((std::numeric_limits<std::uint8_t>::max)()))
|
||||
{
|
||||
return 'U';
|
||||
}
|
||||
if (j.m_value.number_unsigned <= (std::numeric_limits<std::int16_t>::max)())
|
||||
if (j.m_value.number_unsigned <= static_cast<std::uint64_t>((std::numeric_limits<std::int16_t>::max)()))
|
||||
{
|
||||
return 'I';
|
||||
}
|
||||
if (j.m_value.number_unsigned <= (std::numeric_limits<std::int32_t>::max)())
|
||||
if (j.m_value.number_unsigned <= static_cast<std::uint64_t>((std::numeric_limits<std::int32_t>::max)()))
|
||||
{
|
||||
return 'l';
|
||||
}
|
||||
|
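The static_casts added in the number_unsigned branch above keep the comparison between the stored unsigned value and the signed numeric_limits bounds within one type, which avoids sign-compare warnings. A tiny self-contained sketch of the same pattern; the function name is hypothetical and only the first few UBJSON size markers are mirrored:

#include <cstdint>
#include <limits>

// comparing an unsigned value directly against a signed maximum can trigger
// -Wsign-compare; casting the signed bound to std::uint64_t first keeps both
// operands unsigned, mirroring the change above
char ubjson_size_marker(std::uint64_t n)
{
    if (n <= static_cast<std::uint64_t>((std::numeric_limits<std::int8_t>::max)()))
    {
        return 'i';
    }
    if (n <= static_cast<std::uint64_t>((std::numeric_limits<std::uint8_t>::max)()))
    {
        return 'U';
    }
    if (n <= static_cast<std::uint64_t>((std::numeric_limits<std::int16_t>::max)()))
    {
        return 'I';
    }
    return 'l';  // wider types follow in the real implementation
}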
|
|
@ -8,6 +8,7 @@
|
|||
#include <ostream> // basic_ostream
|
||||
#include <string> // basic_string
|
||||
#include <vector> // vector
|
||||
#include <nlohmann/detail/macro_scope.hpp>
|
||||
|
||||
namespace nlohmann
|
||||
{
|
||||
|
@ -39,6 +40,7 @@ class output_vector_adapter : public output_adapter_protocol<CharType>
|
|||
v.push_back(c);
|
||||
}
|
||||
|
||||
JSON_HEDLEY_NON_NULL(2)
|
||||
void write_characters(const CharType* s, std::size_t length) override
|
||||
{
|
||||
std::copy(s, s + length, std::back_inserter(v));
|
||||
|
@ -62,6 +64,7 @@ class output_stream_adapter : public output_adapter_protocol<CharType>
|
|||
stream.put(c);
|
||||
}
|
||||
|
||||
JSON_HEDLEY_NON_NULL(2)
|
||||
void write_characters(const CharType* s, std::size_t length) override
|
||||
{
|
||||
stream.write(s, static_cast<std::streamsize>(length));
|
||||
|
@ -85,6 +88,7 @@ class output_string_adapter : public output_adapter_protocol<CharType>
|
|||
str.push_back(c);
|
||||
}
|
||||
|
||||
JSON_HEDLEY_NON_NULL(2)
|
||||
void write_characters(const CharType* s, std::size_t length) override
|
||||
{
|
||||
str.append(s, length);
|
||||
|
|
|
@ -110,7 +110,7 @@ class serializer
|
|||
|
||||
// variable to hold indentation for recursive calls
|
||||
const auto new_indent = current_indent + indent_step;
|
||||
if (JSON_UNLIKELY(indent_string.size() < new_indent))
|
||||
if (JSON_HEDLEY_UNLIKELY(indent_string.size() < new_indent))
|
||||
{
|
||||
indent_string.resize(indent_string.size() * 2, ' ');
|
||||
}
|
||||
|
@ -183,7 +183,7 @@ class serializer
|
|||
|
||||
// variable to hold indentation for recursive calls
|
||||
const auto new_indent = current_indent + indent_step;
|
||||
if (JSON_UNLIKELY(indent_string.size() < new_indent))
|
||||
if (JSON_HEDLEY_UNLIKELY(indent_string.size() < new_indent))
|
||||
{
|
||||
indent_string.resize(indent_string.size() * 2, ' ');
|
||||
}
|
||||
|
@ -498,7 +498,7 @@ class serializer
|
|||
}
|
||||
|
||||
// we finished processing the string
|
||||
if (JSON_LIKELY(state == UTF8_ACCEPT))
|
||||
if (JSON_HEDLEY_LIKELY(state == UTF8_ACCEPT))
|
||||
{
|
||||
// write buffer
|
||||
if (bytes > 0)
|
||||
|
|
|
@@ -1,7 +1,7 @@
/*
    __ _____ _____ _____
 __|  |   __|     |   | |  JSON for Modern C++
|  |  |__   |  |  | | | |  version 3.6.1
|  |  |__   |  |  | | | |  version 3.7.0
|_____|_____|_____|_|___|  https://github.com/nlohmann/json

Licensed under the MIT License <http://opensource.org/licenses/MIT>.

@@ -31,8 +31,8 @@ SOFTWARE.
#define INCLUDE_NLOHMANN_JSON_HPP_

#define NLOHMANN_JSON_VERSION_MAJOR 3
#define NLOHMANN_JSON_VERSION_MINOR 6
#define NLOHMANN_JSON_VERSION_PATCH 1
#define NLOHMANN_JSON_VERSION_MINOR 7
#define NLOHMANN_JSON_VERSION_PATCH 0

#include <algorithm> // all_of, find, for_each
#include <cassert> // assert
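With the version bump above, the NLOHMANN_JSON_VERSION_* macros now report 3.7.0, so downstream code can gate on the release at compile time. A small sketch:

#include <iostream>
#include <nlohmann/json.hpp>

int main()
{
    static_assert(NLOHMANN_JSON_VERSION_MAJOR == 3 && NLOHMANN_JSON_VERSION_MINOR >= 7,
                  "this code expects nlohmann/json 3.7 or newer");

    std::cout << NLOHMANN_JSON_VERSION_MAJOR << '.'
              << NLOHMANN_JSON_VERSION_MINOR << '.'
              << NLOHMANN_JSON_VERSION_PATCH << '\n';   // prints 3.7.0
    return 0;
}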
|
@ -319,7 +319,7 @@ class basic_json
|
|||
|
||||
@since 2.1.0
|
||||
*/
|
||||
JSON_NODISCARD
|
||||
JSON_HEDLEY_WARN_UNUSED_RESULT
|
||||
static basic_json meta()
|
||||
{
|
||||
basic_json result;
|
||||
|
@ -824,6 +824,7 @@ class basic_json
|
|||
|
||||
/// helper for exception-safe object creation
|
||||
template<typename T, typename... Args>
|
||||
JSON_HEDLEY_RETURNS_NON_NULL
|
||||
static T* create(Args&& ... args)
|
||||
{
|
||||
AllocatorType<T> alloc;
|
||||
|
@ -950,9 +951,9 @@ class basic_json
|
|||
default:
|
||||
{
|
||||
object = nullptr; // silence warning, see #821
|
||||
if (JSON_UNLIKELY(t == value_t::null))
|
||||
if (JSON_HEDLEY_UNLIKELY(t == value_t::null))
|
||||
{
|
||||
JSON_THROW(other_error::create(500, "961c151d2e87f2686a955a9be24d316f1362bf21 3.6.1")); // LCOV_EXCL_LINE
|
||||
JSON_THROW(other_error::create(500, "961c151d2e87f2686a955a9be24d316f1362bf21 3.7.0")); // LCOV_EXCL_LINE
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
@ -1427,7 +1428,7 @@ class basic_json
|
|||
}
|
||||
|
||||
// if object is wanted but impossible, throw an exception
|
||||
if (JSON_UNLIKELY(manual_type == value_t::object and not is_an_object))
|
||||
if (JSON_HEDLEY_UNLIKELY(manual_type == value_t::object and not is_an_object))
|
||||
{
|
||||
JSON_THROW(type_error::create(301, "cannot create object from initializer list"));
|
||||
}
|
||||
|
@ -1494,7 +1495,7 @@ class basic_json
|
|||
|
||||
@since version 1.0.0
|
||||
*/
|
||||
JSON_NODISCARD
|
||||
JSON_HEDLEY_WARN_UNUSED_RESULT
|
||||
static basic_json array(initializer_list_t init = {})
|
||||
{
|
||||
return basic_json(init, false, value_t::array);
|
||||
|
@ -1538,7 +1539,7 @@ class basic_json
|
|||
|
||||
@since version 1.0.0
|
||||
*/
|
||||
JSON_NODISCARD
|
||||
JSON_HEDLEY_WARN_UNUSED_RESULT
|
||||
static basic_json object(initializer_list_t init = {})
|
||||
{
|
||||
return basic_json(init, false, value_t::object);
|
||||
|
@ -1637,7 +1638,7 @@ class basic_json
|
|||
assert(last.m_object != nullptr);
|
||||
|
||||
// make sure iterator fits the current value
|
||||
if (JSON_UNLIKELY(first.m_object != last.m_object))
|
||||
if (JSON_HEDLEY_UNLIKELY(first.m_object != last.m_object))
|
||||
{
|
||||
JSON_THROW(invalid_iterator::create(201, "iterators are not compatible"));
|
||||
}
|
||||
|
@ -1654,8 +1655,8 @@ class basic_json
|
|||
case value_t::number_unsigned:
|
||||
case value_t::string:
|
||||
{
|
||||
if (JSON_UNLIKELY(not first.m_it.primitive_iterator.is_begin()
|
||||
or not last.m_it.primitive_iterator.is_end()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not first.m_it.primitive_iterator.is_begin()
|
||||
or not last.m_it.primitive_iterator.is_end()))
|
||||
{
|
||||
JSON_THROW(invalid_iterator::create(204, "iterators out of range"));
|
||||
}
|
||||
|
@ -2368,7 +2369,7 @@ class basic_json
|
|||
/// get a boolean (explicit)
|
||||
boolean_t get_impl(boolean_t* /*unused*/) const
|
||||
{
|
||||
if (JSON_LIKELY(is_boolean()))
|
||||
if (JSON_HEDLEY_LIKELY(is_boolean()))
|
||||
{
|
||||
return m_value.boolean;
|
||||
}
|
||||
|
@ -2477,7 +2478,7 @@ class basic_json
|
|||
// delegate the call to get_ptr<>()
|
||||
auto ptr = obj.template get_ptr<typename std::add_pointer<ReferenceType>::type>();
|
||||
|
||||
if (JSON_LIKELY(ptr != nullptr))
|
||||
if (JSON_HEDLEY_LIKELY(ptr != nullptr))
|
||||
{
|
||||
return *ptr;
|
||||
}
|
||||
|
@@ -2684,6 +2685,19 @@ class basic_json
        return v;
    }

+    template <
+        typename T, std::size_t N,
+        typename Array = T (&)[N],
+        detail::enable_if_t <
+            detail::has_from_json<basic_json_t, Array>::value, int > = 0 >
+    Array get_to(T (&v)[N]) const
+    noexcept(noexcept(JSONSerializer<Array>::from_json(
+                          std::declval<const basic_json_t&>(), v)))
+    {
+        JSONSerializer<Array>::from_json(*this, v);
+        return v;
+    }
+

    /*!
    @brief get a pointer value (implicit)
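The new get_to overload above fills an existing C-style array in place. A hedged usage sketch, assuming the accompanying from_json support for built-in arrays that ships with this release:

```cpp
#include <nlohmann/json.hpp>
#include <iostream>

int main()
{
    const nlohmann::json j = {1, 2, 3, 4};

    int values[4] = {};
    j.get_to(values);                 // fills the existing array in place

    for (const int v : values)
    {
        std::cout << v << ' ';        // prints: 1 2 3 4
    }
    std::cout << '\n';
    return 0;
}
```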
@ -2915,7 +2929,7 @@ class basic_json
|
|||
reference at(size_type idx)
|
||||
{
|
||||
// at only works for arrays
|
||||
if (JSON_LIKELY(is_array()))
|
||||
if (JSON_HEDLEY_LIKELY(is_array()))
|
||||
{
|
||||
JSON_TRY
|
||||
{
|
||||
|
@ -2962,7 +2976,7 @@ class basic_json
|
|||
const_reference at(size_type idx) const
|
||||
{
|
||||
// at only works for arrays
|
||||
if (JSON_LIKELY(is_array()))
|
||||
if (JSON_HEDLEY_LIKELY(is_array()))
|
||||
{
|
||||
JSON_TRY
|
||||
{
|
||||
|
@ -3013,7 +3027,7 @@ class basic_json
|
|||
reference at(const typename object_t::key_type& key)
|
||||
{
|
||||
// at only works for objects
|
||||
if (JSON_LIKELY(is_object()))
|
||||
if (JSON_HEDLEY_LIKELY(is_object()))
|
||||
{
|
||||
JSON_TRY
|
||||
{
|
||||
|
@ -3064,7 +3078,7 @@ class basic_json
|
|||
const_reference at(const typename object_t::key_type& key) const
|
||||
{
|
||||
// at only works for objects
|
||||
if (JSON_LIKELY(is_object()))
|
||||
if (JSON_HEDLEY_LIKELY(is_object()))
|
||||
{
|
||||
JSON_TRY
|
||||
{
|
||||
|
@ -3118,7 +3132,7 @@ class basic_json
|
|||
}
|
||||
|
||||
// operator[] only works for arrays
|
||||
if (JSON_LIKELY(is_array()))
|
||||
if (JSON_HEDLEY_LIKELY(is_array()))
|
||||
{
|
||||
// fill up array with null values if given idx is outside range
|
||||
if (idx >= m_value.array->size())
|
||||
|
@ -3156,7 +3170,7 @@ class basic_json
|
|||
const_reference operator[](size_type idx) const
|
||||
{
|
||||
// const operator[] only works for arrays
|
||||
if (JSON_LIKELY(is_array()))
|
||||
if (JSON_HEDLEY_LIKELY(is_array()))
|
||||
{
|
||||
return m_value.array->operator[](idx);
|
||||
}
|
||||
|
@ -3202,7 +3216,7 @@ class basic_json
|
|||
}
|
||||
|
||||
// operator[] only works for objects
|
||||
if (JSON_LIKELY(is_object()))
|
||||
if (JSON_HEDLEY_LIKELY(is_object()))
|
||||
{
|
||||
return m_value.object->operator[](key);
|
||||
}
|
||||
|
@ -3243,7 +3257,7 @@ class basic_json
|
|||
const_reference operator[](const typename object_t::key_type& key) const
|
||||
{
|
||||
// const operator[] only works for objects
|
||||
if (JSON_LIKELY(is_object()))
|
||||
if (JSON_HEDLEY_LIKELY(is_object()))
|
||||
{
|
||||
assert(m_value.object->find(key) != m_value.object->end());
|
||||
return m_value.object->find(key)->second;
|
||||
|
@ -3280,6 +3294,7 @@ class basic_json
|
|||
@since version 1.1.0
|
||||
*/
|
||||
template<typename T>
|
||||
JSON_HEDLEY_NON_NULL(2)
|
||||
reference operator[](T* key)
|
||||
{
|
||||
// implicitly convert null to object
|
||||
|
@ -3291,7 +3306,7 @@ class basic_json
|
|||
}
|
||||
|
||||
// at only works for objects
|
||||
if (JSON_LIKELY(is_object()))
|
||||
if (JSON_HEDLEY_LIKELY(is_object()))
|
||||
{
|
||||
return m_value.object->operator[](key);
|
||||
}
|
||||
|
@ -3330,10 +3345,11 @@ class basic_json
|
|||
@since version 1.1.0
|
||||
*/
|
||||
template<typename T>
|
||||
JSON_HEDLEY_NON_NULL(2)
|
||||
const_reference operator[](T* key) const
|
||||
{
|
||||
// at only works for objects
|
||||
if (JSON_LIKELY(is_object()))
|
||||
if (JSON_HEDLEY_LIKELY(is_object()))
|
||||
{
|
||||
assert(m_value.object->find(key) != m_value.object->end());
|
||||
return m_value.object->find(key)->second;
|
||||
|
@ -3375,6 +3391,8 @@ class basic_json
|
|||
@return copy of the element at key @a key or @a default_value if @a key
|
||||
is not found
|
||||
|
||||
@throw type_error.302 if @a default_value does not match the type of the
|
||||
value at @a key
|
||||
@throw type_error.306 if the JSON value is not an object; in that case,
|
||||
using `value()` with a key makes no sense.
|
||||
|
||||
|
@ -3395,7 +3413,7 @@ class basic_json
|
|||
ValueType value(const typename object_t::key_type& key, const ValueType& default_value) const
|
||||
{
|
||||
// at only works for objects
|
||||
if (JSON_LIKELY(is_object()))
|
||||
if (JSON_HEDLEY_LIKELY(is_object()))
|
||||
{
|
||||
// if key is found, return value and given default value otherwise
|
||||
const auto it = find(key);
|
||||
|
@ -3448,6 +3466,8 @@ class basic_json
|
|||
@return copy of the element at key @a key or @a default_value if @a key
|
||||
is not found
|
||||
|
||||
@throw type_error.302 if @a default_value does not match the type of the
|
||||
value at @a ptr
|
||||
@throw type_error.306 if the JSON value is not an object; in that case,
|
||||
using `value()` with a key makes no sense.
|
||||
|
||||
|
@ -3465,7 +3485,7 @@ class basic_json
|
|||
ValueType value(const json_pointer& ptr, const ValueType& default_value) const
|
||||
{
|
||||
// at only works for objects
|
||||
if (JSON_LIKELY(is_object()))
|
||||
if (JSON_HEDLEY_LIKELY(is_object()))
|
||||
{
|
||||
// if pointer resolves a value, return it or use default value
|
||||
JSON_TRY
|
||||
|
@ -3485,6 +3505,7 @@ class basic_json
|
|||
@brief overload for a default value of type const char*
|
||||
@copydoc basic_json::value(const json_pointer&, ValueType) const
|
||||
*/
|
||||
JSON_HEDLEY_NON_NULL(3)
|
||||
string_t value(const json_pointer& ptr, const char* default_value) const
|
||||
{
|
||||
return value(ptr, string_t(default_value));
|
||||
|
@ -3629,7 +3650,7 @@ class basic_json
|
|||
IteratorType erase(IteratorType pos)
|
||||
{
|
||||
// make sure iterator fits the current value
|
||||
if (JSON_UNLIKELY(this != pos.m_object))
|
||||
if (JSON_HEDLEY_UNLIKELY(this != pos.m_object))
|
||||
{
|
||||
JSON_THROW(invalid_iterator::create(202, "iterator does not fit current value"));
|
||||
}
|
||||
|
@ -3644,7 +3665,7 @@ class basic_json
|
|||
case value_t::number_unsigned:
|
||||
case value_t::string:
|
||||
{
|
||||
if (JSON_UNLIKELY(not pos.m_it.primitive_iterator.is_begin()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not pos.m_it.primitive_iterator.is_begin()))
|
||||
{
|
||||
JSON_THROW(invalid_iterator::create(205, "iterator out of range"));
|
||||
}
|
||||
|
@ -3734,7 +3755,7 @@ class basic_json
|
|||
IteratorType erase(IteratorType first, IteratorType last)
|
||||
{
|
||||
// make sure iterator fits the current value
|
||||
if (JSON_UNLIKELY(this != first.m_object or this != last.m_object))
|
||||
if (JSON_HEDLEY_UNLIKELY(this != first.m_object or this != last.m_object))
|
||||
{
|
||||
JSON_THROW(invalid_iterator::create(203, "iterators do not fit current value"));
|
||||
}
|
||||
|
@ -3749,8 +3770,8 @@ class basic_json
|
|||
case value_t::number_unsigned:
|
||||
case value_t::string:
|
||||
{
|
||||
if (JSON_LIKELY(not first.m_it.primitive_iterator.is_begin()
|
||||
or not last.m_it.primitive_iterator.is_end()))
|
||||
if (JSON_HEDLEY_LIKELY(not first.m_it.primitive_iterator.is_begin()
|
||||
or not last.m_it.primitive_iterator.is_end()))
|
||||
{
|
||||
JSON_THROW(invalid_iterator::create(204, "iterators out of range"));
|
||||
}
|
||||
|
@ -3821,7 +3842,7 @@ class basic_json
|
|||
size_type erase(const typename object_t::key_type& key)
|
||||
{
|
||||
// this erase only works for objects
|
||||
if (JSON_LIKELY(is_object()))
|
||||
if (JSON_HEDLEY_LIKELY(is_object()))
|
||||
{
|
||||
return m_value.object->erase(key);
|
||||
}
|
||||
|
@ -3856,9 +3877,9 @@ class basic_json
|
|||
void erase(const size_type idx)
|
||||
{
|
||||
// this erase only works for arrays
|
||||
if (JSON_LIKELY(is_array()))
|
||||
if (JSON_HEDLEY_LIKELY(is_array()))
|
||||
{
|
||||
if (JSON_UNLIKELY(idx >= size()))
|
||||
if (JSON_HEDLEY_UNLIKELY(idx >= size()))
|
||||
{
|
||||
JSON_THROW(out_of_range::create(401, "array index " + std::to_string(idx) + " is out of range"));
|
||||
}
|
||||
|
@@ -3984,15 +4005,48 @@ class basic_json
    @liveexample{The following code shows an example for `contains()`.,contains}

    @sa @ref find(KeyT&&) -- returns an iterator to an object element
+    @sa @ref contains(const json_pointer&) const -- checks the existence for a JSON pointer

    @since version 3.6.0
    */
-    template<typename KeyT>
-    bool contains(KeyT&& key) const
+    template<typename KeyT, typename std::enable_if<
+                 not std::is_same<KeyT, json_pointer>::value, int>::type = 0>
+    bool contains(KeyT && key) const
    {
        return is_object() and m_value.object->find(std::forward<KeyT>(key)) != m_value.object->end();
    }

+    /*!
+    @brief check the existence of an element in a JSON object given a JSON pointer
+
+    Check whether the given JSON pointer @a ptr can be resolved in the current
+    JSON value.
+
+    @note This method can be executed on any JSON value type.
+
+    @param[in] ptr JSON pointer to check its existence.
+
+    @return true if the JSON pointer can be resolved to a stored value, false
+    otherwise.
+
+    @post If `j.contains(ptr)` returns true, it is safe to call `j[ptr]`.
+
+    @throw parse_error.106 if an array index begins with '0'
+    @throw parse_error.109 if an array index was not a number
+
+    @complexity Logarithmic in the size of the JSON object.
+
+    @liveexample{The following code shows an example for `contains()`.,contains_json_pointer}
+
+    @sa @ref contains(KeyT &&) const -- checks the existence of a key
+
+    @since version 3.7.0
+    */
+    bool contains(const json_pointer& ptr) const
+    {
+        return ptr.contains(this);
+    }
+
    /// @}
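The two contains() overloads documented above complement each other: one takes an object key, the new one (3.7.0) takes a JSON pointer and also works on nested values. A short usage sketch:

```cpp
#include <nlohmann/json.hpp>
#include <iostream>

int main()
{
    const nlohmann::json j = {{"address", {{"city", "Berlin"}}}};

    // key-based check (since 3.6.0)
    std::cout << std::boolalpha << j.contains("address") << '\n';      // true

    // JSON-pointer-based check (new in 3.7.0)
    const nlohmann::json::json_pointer ptr("/address/city");
    if (j.contains(ptr))
    {
        std::cout << j[ptr] << '\n';                                   // "Berlin"
    }
    return 0;
}
```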
@ -4334,7 +4388,7 @@ class basic_json
|
|||
future 4.0.0 of the library. Please use @ref items() instead;
|
||||
that is, replace `json::iterator_wrapper(j)` with `j.items()`.
|
||||
*/
|
||||
JSON_DEPRECATED
|
||||
JSON_HEDLEY_DEPRECATED(3.1.0)
|
||||
static iteration_proxy<iterator> iterator_wrapper(reference ref) noexcept
|
||||
{
|
||||
return ref.items();
|
||||
|
@ -4343,7 +4397,7 @@ class basic_json
|
|||
/*!
|
||||
@copydoc iterator_wrapper(reference)
|
||||
*/
|
||||
JSON_DEPRECATED
|
||||
JSON_HEDLEY_DEPRECATED(3.1.0)
|
||||
static iteration_proxy<const_iterator> iterator_wrapper(const_reference ref) noexcept
|
||||
{
|
||||
return ref.items();
|
||||
|
@ -4762,7 +4816,7 @@ class basic_json
|
|||
void push_back(basic_json&& val)
|
||||
{
|
||||
// push_back only works for null objects or arrays
|
||||
if (JSON_UNLIKELY(not(is_null() or is_array())))
|
||||
if (JSON_HEDLEY_UNLIKELY(not(is_null() or is_array())))
|
||||
{
|
||||
JSON_THROW(type_error::create(308, "cannot use push_back() with " + std::string(type_name())));
|
||||
}
|
||||
|
@ -4799,7 +4853,7 @@ class basic_json
|
|||
void push_back(const basic_json& val)
|
||||
{
|
||||
// push_back only works for null objects or arrays
|
||||
if (JSON_UNLIKELY(not(is_null() or is_array())))
|
||||
if (JSON_HEDLEY_UNLIKELY(not(is_null() or is_array())))
|
||||
{
|
||||
JSON_THROW(type_error::create(308, "cannot use push_back() with " + std::string(type_name())));
|
||||
}
|
||||
|
@ -4849,7 +4903,7 @@ class basic_json
|
|||
void push_back(const typename object_t::value_type& val)
|
||||
{
|
||||
// push_back only works for null objects or objects
|
||||
if (JSON_UNLIKELY(not(is_null() or is_object())))
|
||||
if (JSON_HEDLEY_UNLIKELY(not(is_null() or is_object())))
|
||||
{
|
||||
JSON_THROW(type_error::create(308, "cannot use push_back() with " + std::string(type_name())));
|
||||
}
|
||||
|
@@ -4935,6 +4989,8 @@ class basic_json
    @param[in] args arguments to forward to a constructor of @ref basic_json
    @tparam Args compatible types to create a @ref basic_json object

+    @return reference to the inserted element
+
    @throw type_error.311 when called on a type other than JSON array or
    null; example: `"cannot use emplace_back() with number"`

@@ -4944,13 +5000,13 @@ class basic_json
    elements to a JSON array. Note how the `null` value was silently converted
    to a JSON array.,emplace_back}

-    @since version 2.0.8
+    @since version 2.0.8, returns reference since 3.7.0
    */
    template<class... Args>
-    void emplace_back(Args&& ... args)
+    reference emplace_back(Args&& ... args)
    {
        // emplace_back only works for null objects or arrays
-        if (JSON_UNLIKELY(not(is_null() or is_array())))
+        if (JSON_HEDLEY_UNLIKELY(not(is_null() or is_array())))
        {
            JSON_THROW(type_error::create(311, "cannot use emplace_back() with " + std::string(type_name())));
        }
@@ -4964,7 +5020,12 @@ class basic_json
        }

        // add element to array (perfect forwarding)
+#ifdef JSON_HAS_CPP_17
+        return m_value.array->emplace_back(std::forward<Args>(args)...);
+#else
        m_value.array->emplace_back(std::forward<Args>(args)...);
+        return m_value.array->back();
+#endif
    }

    /*!
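Because emplace_back now returns a reference (see the @since note above), the freshly inserted element can be used directly; a short sketch:

```cpp
#include <nlohmann/json.hpp>
#include <iostream>

int main()
{
    nlohmann::json j = nlohmann::json::array();

    auto& elem = j.emplace_back("placeholder");   // reference to the new element (3.7.0)
    elem = 42;                                    // modify it through the returned reference

    std::cout << j.dump() << '\n';                // prints: [42]
    return 0;
}
```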
@ -4998,7 +5059,7 @@ class basic_json
|
|||
std::pair<iterator, bool> emplace(Args&& ... args)
|
||||
{
|
||||
// emplace only works for null objects or arrays
|
||||
if (JSON_UNLIKELY(not(is_null() or is_object())))
|
||||
if (JSON_HEDLEY_UNLIKELY(not(is_null() or is_object())))
|
||||
{
|
||||
JSON_THROW(type_error::create(311, "cannot use emplace() with " + std::string(type_name())));
|
||||
}
|
||||
|
@ -5066,10 +5127,10 @@ class basic_json
|
|||
iterator insert(const_iterator pos, const basic_json& val)
|
||||
{
|
||||
// insert only works for arrays
|
||||
if (JSON_LIKELY(is_array()))
|
||||
if (JSON_HEDLEY_LIKELY(is_array()))
|
||||
{
|
||||
// check if iterator pos fits to this JSON value
|
||||
if (JSON_UNLIKELY(pos.m_object != this))
|
||||
if (JSON_HEDLEY_UNLIKELY(pos.m_object != this))
|
||||
{
|
||||
JSON_THROW(invalid_iterator::create(202, "iterator does not fit current value"));
|
||||
}
|
||||
|
@ -5117,10 +5178,10 @@ class basic_json
|
|||
iterator insert(const_iterator pos, size_type cnt, const basic_json& val)
|
||||
{
|
||||
// insert only works for arrays
|
||||
if (JSON_LIKELY(is_array()))
|
||||
if (JSON_HEDLEY_LIKELY(is_array()))
|
||||
{
|
||||
// check if iterator pos fits to this JSON value
|
||||
if (JSON_UNLIKELY(pos.m_object != this))
|
||||
if (JSON_HEDLEY_UNLIKELY(pos.m_object != this))
|
||||
{
|
||||
JSON_THROW(invalid_iterator::create(202, "iterator does not fit current value"));
|
||||
}
|
||||
|
@ -5165,24 +5226,24 @@ class basic_json
|
|||
iterator insert(const_iterator pos, const_iterator first, const_iterator last)
|
||||
{
|
||||
// insert only works for arrays
|
||||
if (JSON_UNLIKELY(not is_array()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not is_array()))
|
||||
{
|
||||
JSON_THROW(type_error::create(309, "cannot use insert() with " + std::string(type_name())));
|
||||
}
|
||||
|
||||
// check if iterator pos fits to this JSON value
|
||||
if (JSON_UNLIKELY(pos.m_object != this))
|
||||
if (JSON_HEDLEY_UNLIKELY(pos.m_object != this))
|
||||
{
|
||||
JSON_THROW(invalid_iterator::create(202, "iterator does not fit current value"));
|
||||
}
|
||||
|
||||
// check if range iterators belong to the same JSON object
|
||||
if (JSON_UNLIKELY(first.m_object != last.m_object))
|
||||
if (JSON_HEDLEY_UNLIKELY(first.m_object != last.m_object))
|
||||
{
|
||||
JSON_THROW(invalid_iterator::create(210, "iterators do not fit"));
|
||||
}
|
||||
|
||||
if (JSON_UNLIKELY(first.m_object == this))
|
||||
if (JSON_HEDLEY_UNLIKELY(first.m_object == this))
|
||||
{
|
||||
JSON_THROW(invalid_iterator::create(211, "passed iterators may not belong to container"));
|
||||
}
|
||||
|
@ -5218,13 +5279,13 @@ class basic_json
|
|||
iterator insert(const_iterator pos, initializer_list_t ilist)
|
||||
{
|
||||
// insert only works for arrays
|
||||
if (JSON_UNLIKELY(not is_array()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not is_array()))
|
||||
{
|
||||
JSON_THROW(type_error::create(309, "cannot use insert() with " + std::string(type_name())));
|
||||
}
|
||||
|
||||
// check if iterator pos fits to this JSON value
|
||||
if (JSON_UNLIKELY(pos.m_object != this))
|
||||
if (JSON_HEDLEY_UNLIKELY(pos.m_object != this))
|
||||
{
|
||||
JSON_THROW(invalid_iterator::create(202, "iterator does not fit current value"));
|
||||
}
|
||||
|
@ -5259,19 +5320,19 @@ class basic_json
|
|||
void insert(const_iterator first, const_iterator last)
|
||||
{
|
||||
// insert only works for objects
|
||||
if (JSON_UNLIKELY(not is_object()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not is_object()))
|
||||
{
|
||||
JSON_THROW(type_error::create(309, "cannot use insert() with " + std::string(type_name())));
|
||||
}
|
||||
|
||||
// check if range iterators belong to the same JSON object
|
||||
if (JSON_UNLIKELY(first.m_object != last.m_object))
|
||||
if (JSON_HEDLEY_UNLIKELY(first.m_object != last.m_object))
|
||||
{
|
||||
JSON_THROW(invalid_iterator::create(210, "iterators do not fit"));
|
||||
}
|
||||
|
||||
// passed iterators must belong to objects
|
||||
if (JSON_UNLIKELY(not first.m_object->is_object()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not first.m_object->is_object()))
|
||||
{
|
||||
JSON_THROW(invalid_iterator::create(202, "iterators first and last must point to objects"));
|
||||
}
|
||||
|
@ -5308,11 +5369,11 @@ class basic_json
|
|||
assert_invariant();
|
||||
}
|
||||
|
||||
if (JSON_UNLIKELY(not is_object()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not is_object()))
|
||||
{
|
||||
JSON_THROW(type_error::create(312, "cannot use update() with " + std::string(type_name())));
|
||||
}
|
||||
if (JSON_UNLIKELY(not j.is_object()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not j.is_object()))
|
||||
{
|
||||
JSON_THROW(type_error::create(312, "cannot use update() with " + std::string(j.type_name())));
|
||||
}
|
||||
|
@ -5359,20 +5420,20 @@ class basic_json
|
|||
assert_invariant();
|
||||
}
|
||||
|
||||
if (JSON_UNLIKELY(not is_object()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not is_object()))
|
||||
{
|
||||
JSON_THROW(type_error::create(312, "cannot use update() with " + std::string(type_name())));
|
||||
}
|
||||
|
||||
// check if range iterators belong to the same JSON object
|
||||
if (JSON_UNLIKELY(first.m_object != last.m_object))
|
||||
if (JSON_HEDLEY_UNLIKELY(first.m_object != last.m_object))
|
||||
{
|
||||
JSON_THROW(invalid_iterator::create(210, "iterators do not fit"));
|
||||
}
|
||||
|
||||
// passed iterators must belong to objects
|
||||
if (JSON_UNLIKELY(not first.m_object->is_object()
|
||||
or not last.m_object->is_object()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not first.m_object->is_object()
|
||||
or not last.m_object->is_object()))
|
||||
{
|
||||
JSON_THROW(invalid_iterator::create(202, "iterators first and last must point to objects"));
|
||||
}
|
||||
|
@ -5435,7 +5496,7 @@ class basic_json
|
|||
void swap(array_t& other)
|
||||
{
|
||||
// swap only works for arrays
|
||||
if (JSON_LIKELY(is_array()))
|
||||
if (JSON_HEDLEY_LIKELY(is_array()))
|
||||
{
|
||||
std::swap(*(m_value.array), other);
|
||||
}
|
||||
|
@ -5468,7 +5529,7 @@ class basic_json
|
|||
void swap(object_t& other)
|
||||
{
|
||||
// swap only works for objects
|
||||
if (JSON_LIKELY(is_object()))
|
||||
if (JSON_HEDLEY_LIKELY(is_object()))
|
||||
{
|
||||
std::swap(*(m_value.object), other);
|
||||
}
|
||||
|
@ -5501,7 +5562,7 @@ class basic_json
|
|||
void swap(string_t& other)
|
||||
{
|
||||
// swap only works for strings
|
||||
if (JSON_LIKELY(is_string()))
|
||||
if (JSON_HEDLEY_LIKELY(is_string()))
|
||||
{
|
||||
std::swap(*(m_value.string), other);
|
||||
}
|
||||
|
@ -5733,25 +5794,25 @@ class basic_json
|
|||
return (*lhs.m_value.array) < (*rhs.m_value.array);
|
||||
|
||||
case value_t::object:
|
||||
return *lhs.m_value.object < *rhs.m_value.object;
|
||||
return (*lhs.m_value.object) < (*rhs.m_value.object);
|
||||
|
||||
case value_t::null:
|
||||
return false;
|
||||
|
||||
case value_t::string:
|
||||
return *lhs.m_value.string < *rhs.m_value.string;
|
||||
return (*lhs.m_value.string) < (*rhs.m_value.string);
|
||||
|
||||
case value_t::boolean:
|
||||
return lhs.m_value.boolean < rhs.m_value.boolean;
|
||||
return (lhs.m_value.boolean) < (rhs.m_value.boolean);
|
||||
|
||||
case value_t::number_integer:
|
||||
return lhs.m_value.number_integer < rhs.m_value.number_integer;
|
||||
return (lhs.m_value.number_integer) < (rhs.m_value.number_integer);
|
||||
|
||||
case value_t::number_unsigned:
|
||||
return lhs.m_value.number_unsigned < rhs.m_value.number_unsigned;
|
||||
return (lhs.m_value.number_unsigned) < (rhs.m_value.number_unsigned);
|
||||
|
||||
case value_t::number_float:
|
||||
return lhs.m_value.number_float < rhs.m_value.number_float;
|
||||
return (lhs.m_value.number_float) < (rhs.m_value.number_float);
|
||||
|
||||
default:
|
||||
return false;
|
||||
|
@ -6011,7 +6072,7 @@ class basic_json
|
|||
instead; that is, replace calls like `j >> o;` with `o << j;`.
|
||||
@since version 1.0.0; deprecated since version 3.0.0
|
||||
*/
|
||||
JSON_DEPRECATED
|
||||
JSON_HEDLEY_DEPRECATED(3.0.0)
|
||||
friend std::ostream& operator>>(const basic_json& j, std::ostream& o)
|
||||
{
|
||||
return o << j;
|
||||
|
@ -6090,7 +6151,7 @@ class basic_json
|
|||
|
||||
@since version 2.0.3 (contiguous containers)
|
||||
*/
|
||||
JSON_NODISCARD
|
||||
JSON_HEDLEY_WARN_UNUSED_RESULT
|
||||
static basic_json parse(detail::input_adapter&& i,
|
||||
const parser_callback_t cb = nullptr,
|
||||
const bool allow_exceptions = true)
|
||||
|
@ -6159,6 +6220,7 @@ class basic_json
|
|||
@since version 3.2.0
|
||||
*/
|
||||
template <typename SAX>
|
||||
JSON_HEDLEY_NON_NULL(2)
|
||||
static bool sax_parse(detail::input_adapter&& i, SAX* sax,
|
||||
input_format_t format = input_format_t::json,
|
||||
const bool strict = true)
|
||||
|
@ -6244,6 +6306,7 @@ class basic_json
|
|||
std::is_base_of<
|
||||
std::random_access_iterator_tag,
|
||||
typename std::iterator_traits<IteratorType>::iterator_category>::value, int>::type = 0>
|
||||
JSON_HEDLEY_NON_NULL(3)
|
||||
static bool sax_parse(IteratorType first, IteratorType last, SAX* sax)
|
||||
{
|
||||
return parser(detail::input_adapter(first, last)).sax_parse(sax);
|
||||
|
@ -6257,7 +6320,7 @@ class basic_json
|
|||
instead; that is, replace calls like `j << i;` with `i >> j;`.
|
||||
@since version 1.0.0; deprecated since version 3.0.0
|
||||
*/
|
||||
JSON_DEPRECATED
|
||||
JSON_HEDLEY_DEPRECATED(3.0.0)
|
||||
friend std::istream& operator<<(basic_json& j, std::istream& i)
|
||||
{
|
||||
return operator>>(i, j);
|
||||
|
@ -6330,6 +6393,7 @@ class basic_json
|
|||
@since version 1.0.0, public since 2.1.0, `const char*` and `noexcept`
|
||||
since 3.0.0
|
||||
*/
|
||||
JSON_HEDLEY_RETURNS_NON_NULL
|
||||
const char* type_name() const noexcept
|
||||
{
|
||||
{
|
||||
|
@ -6859,7 +6923,7 @@ class basic_json
|
|||
@a strict parameter since 3.0.0; added @a allow_exceptions parameter
|
||||
since 3.2.0
|
||||
*/
|
||||
JSON_NODISCARD
|
||||
JSON_HEDLEY_WARN_UNUSED_RESULT
|
||||
static basic_json from_cbor(detail::input_adapter&& i,
|
||||
const bool strict = true,
|
||||
const bool allow_exceptions = true)
|
||||
|
@ -6875,7 +6939,7 @@ class basic_json
|
|||
*/
|
||||
template<typename A1, typename A2,
|
||||
detail::enable_if_t<std::is_constructible<detail::input_adapter, A1, A2>::value, int> = 0>
|
||||
JSON_NODISCARD
|
||||
JSON_HEDLEY_WARN_UNUSED_RESULT
|
||||
static basic_json from_cbor(A1 && a1, A2 && a2,
|
||||
const bool strict = true,
|
||||
const bool allow_exceptions = true)
|
||||
|
@ -6968,7 +7032,7 @@ class basic_json
|
|||
@a strict parameter since 3.0.0; added @a allow_exceptions parameter
|
||||
since 3.2.0
|
||||
*/
|
||||
JSON_NODISCARD
|
||||
JSON_HEDLEY_WARN_UNUSED_RESULT
|
||||
static basic_json from_msgpack(detail::input_adapter&& i,
|
||||
const bool strict = true,
|
||||
const bool allow_exceptions = true)
|
||||
|
@ -6984,7 +7048,7 @@ class basic_json
|
|||
*/
|
||||
template<typename A1, typename A2,
|
||||
detail::enable_if_t<std::is_constructible<detail::input_adapter, A1, A2>::value, int> = 0>
|
||||
JSON_NODISCARD
|
||||
JSON_HEDLEY_WARN_UNUSED_RESULT
|
||||
static basic_json from_msgpack(A1 && a1, A2 && a2,
|
||||
const bool strict = true,
|
||||
const bool allow_exceptions = true)
|
||||
|
@ -7056,7 +7120,7 @@ class basic_json
|
|||
|
||||
@since version 3.1.0; added @a allow_exceptions parameter since 3.2.0
|
||||
*/
|
||||
JSON_NODISCARD
|
||||
JSON_HEDLEY_WARN_UNUSED_RESULT
|
||||
static basic_json from_ubjson(detail::input_adapter&& i,
|
||||
const bool strict = true,
|
||||
const bool allow_exceptions = true)
|
||||
|
@ -7072,7 +7136,7 @@ class basic_json
|
|||
*/
|
||||
template<typename A1, typename A2,
|
||||
detail::enable_if_t<std::is_constructible<detail::input_adapter, A1, A2>::value, int> = 0>
|
||||
JSON_NODISCARD
|
||||
JSON_HEDLEY_WARN_UNUSED_RESULT
|
||||
static basic_json from_ubjson(A1 && a1, A2 && a2,
|
||||
const bool strict = true,
|
||||
const bool allow_exceptions = true)
|
||||
|
@ -7143,7 +7207,7 @@ class basic_json
|
|||
@sa @ref from_ubjson(detail::input_adapter&&, const bool, const bool) for the
|
||||
related UBJSON format
|
||||
*/
|
||||
JSON_NODISCARD
|
||||
JSON_HEDLEY_WARN_UNUSED_RESULT
|
||||
static basic_json from_bson(detail::input_adapter&& i,
|
||||
const bool strict = true,
|
||||
const bool allow_exceptions = true)
|
||||
|
@ -7159,7 +7223,7 @@ class basic_json
|
|||
*/
|
||||
template<typename A1, typename A2,
|
||||
detail::enable_if_t<std::is_constructible<detail::input_adapter, A1, A2>::value, int> = 0>
|
||||
JSON_NODISCARD
|
||||
JSON_HEDLEY_WARN_UNUSED_RESULT
|
||||
static basic_json from_bson(A1 && a1, A2 && a2,
|
||||
const bool strict = true,
|
||||
const bool allow_exceptions = true)
|
||||
|
@ -7533,7 +7597,7 @@ class basic_json
|
|||
else
|
||||
{
|
||||
const auto idx = json_pointer::array_index(last_path);
|
||||
if (JSON_UNLIKELY(static_cast<size_type>(idx) > parent.size()))
|
||||
if (JSON_HEDLEY_UNLIKELY(static_cast<size_type>(idx) > parent.size()))
|
||||
{
|
||||
// avoid undefined behavior
|
||||
JSON_THROW(out_of_range::create(401, "array index " + std::to_string(idx) + " is out of range"));
|
||||
|
@ -7564,7 +7628,7 @@ class basic_json
|
|||
{
|
||||
// perform range check
|
||||
auto it = parent.find(last_path);
|
||||
if (JSON_LIKELY(it != parent.end()))
|
||||
if (JSON_HEDLEY_LIKELY(it != parent.end()))
|
||||
{
|
||||
parent.erase(it);
|
||||
}
|
||||
|
@ -7581,7 +7645,7 @@ class basic_json
|
|||
};
|
||||
|
||||
// type check: top level value must be an array
|
||||
if (JSON_UNLIKELY(not json_patch.is_array()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not json_patch.is_array()))
|
||||
{
|
||||
JSON_THROW(parse_error::create(104, 0, "JSON patch must be an array of objects"));
|
||||
}
|
||||
|
@ -7601,13 +7665,13 @@ class basic_json
|
|||
const auto error_msg = (op == "op") ? "operation" : "operation '" + op + "'";
|
||||
|
||||
// check if desired value is present
|
||||
if (JSON_UNLIKELY(it == val.m_value.object->end()))
|
||||
if (JSON_HEDLEY_UNLIKELY(it == val.m_value.object->end()))
|
||||
{
|
||||
JSON_THROW(parse_error::create(105, 0, error_msg + " must have member '" + member + "'"));
|
||||
}
|
||||
|
||||
// check if result is of type string
|
||||
if (JSON_UNLIKELY(string_type and not it->second.is_string()))
|
||||
if (JSON_HEDLEY_UNLIKELY(string_type and not it->second.is_string()))
|
||||
{
|
||||
JSON_THROW(parse_error::create(105, 0, error_msg + " must have string member '" + member + "'"));
|
||||
}
|
||||
|
@ -7617,7 +7681,7 @@ class basic_json
|
|||
};
|
||||
|
||||
// type check: every element of the array must be an object
|
||||
if (JSON_UNLIKELY(not val.is_object()))
|
||||
if (JSON_HEDLEY_UNLIKELY(not val.is_object()))
|
||||
{
|
||||
JSON_THROW(parse_error::create(104, 0, "JSON patch must be an array of objects"));
|
||||
}
|
||||
|
@ -7695,7 +7759,7 @@ class basic_json
|
|||
}
|
||||
|
||||
// throw an exception if test fails
|
||||
if (JSON_UNLIKELY(not success))
|
||||
if (JSON_HEDLEY_UNLIKELY(not success))
|
||||
{
|
||||
JSON_THROW(other_error::create(501, "unsuccessful: " + val.dump()));
|
||||
}
|
||||
|
@ -7748,7 +7812,7 @@ class basic_json
|
|||
|
||||
@since version 2.0.0
|
||||
*/
|
||||
JSON_NODISCARD
|
||||
JSON_HEDLEY_WARN_UNUSED_RESULT
|
||||
static basic_json diff(const basic_json& source, const basic_json& target,
|
||||
const std::string& path = "")
|
||||
{
|
||||
|
@@ -7952,6 +8016,21 @@ class basic_json

    /// @}
};
+
+/*!
+@brief user-defined to_string function for JSON values
+
+This function implements a user-defined to_string for JSON objects.
+
+@param[in] j a JSON object
+@return a std::string object
+*/
+NLOHMANN_BASIC_JSON_TPL_DECLARATION
+std::string to_string(const NLOHMANN_BASIC_JSON_TPL& j)
+{
+    return j.dump();
+}
+
} // namespace nlohmann

///////////////////////
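The new free to_string() overload mirrors std::to_string and simply forwards to dump(); a hedged usage sketch:

```cpp
#include <nlohmann/json.hpp>
#include <iostream>
#include <string>

int main()
{
    const nlohmann::json j = {{"pi", 3.141}, {"happy", true}};

    using std::to_string;                  // generic code can now call to_string unqualified
    const std::string s = to_string(j);    // found via ADL, equivalent to j.dump()

    std::cout << s << '\n';
    return 0;
}
```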
@@ -8025,6 +8104,7 @@ if no parse error occurred.

@since version 1.0.0
*/
+JSON_HEDLEY_NON_NULL(1)
inline nlohmann::json operator "" _json(const char* s, std::size_t n)
{
    return nlohmann::json::parse(s, s + n);
@@ -8043,6 +8123,7 @@ object if no parse error occurred.

@since version 2.0.0
*/
+JSON_HEDLEY_NON_NULL(1)
inline nlohmann::json::json_pointer operator "" _json_pointer(const char* s, std::size_t n)
{
    return nlohmann::json::json_pointer(std::string(s, n));
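The _json and _json_pointer user-defined literals shown above are declared at global scope by this header; a short usage sketch:

```cpp
#include <nlohmann/json.hpp>
#include <iostream>

int main()
{
    const auto j   = R"({"name": "Ada", "year": 1843})"_json;
    const auto ptr = "/name"_json_pointer;

    std::cout << j.at(ptr) << '\n';        // prints: "Ada"
    return 0;
}
```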
include/nlohmann/thirdparty/hedley/hedley.hpp (vendored, new file, 1505 lines)
(file diff suppressed because it is too large)

include/nlohmann/thirdparty/hedley/hedley_undef.hpp (vendored, new file, 121 lines)
|
@ -0,0 +1,121 @@
|
|||
#undef JSON_HEDLEY_ALWAYS_INLINE
|
||||
#undef JSON_HEDLEY_ARM_VERSION
|
||||
#undef JSON_HEDLEY_ARM_VERSION_CHECK
|
||||
#undef JSON_HEDLEY_ARRAY_PARAM
|
||||
#undef JSON_HEDLEY_ASSUME
|
||||
#undef JSON_HEDLEY_BEGIN_C_DECLS
|
||||
#undef JSON_HEDLEY_CLANG_HAS_ATTRIBUTE
|
||||
#undef JSON_HEDLEY_CLANG_HAS_BUILTIN
|
||||
#undef JSON_HEDLEY_CLANG_HAS_CPP_ATTRIBUTE
|
||||
#undef JSON_HEDLEY_CLANG_HAS_DECLSPEC_DECLSPEC_ATTRIBUTE
|
||||
#undef JSON_HEDLEY_CLANG_HAS_EXTENSION
|
||||
#undef JSON_HEDLEY_CLANG_HAS_FEATURE
|
||||
#undef JSON_HEDLEY_CLANG_HAS_WARNING
|
||||
#undef JSON_HEDLEY_COMPCERT_VERSION
|
||||
#undef JSON_HEDLEY_COMPCERT_VERSION_CHECK
|
||||
#undef JSON_HEDLEY_CONCAT
|
||||
#undef JSON_HEDLEY_CONCAT_EX
|
||||
#undef JSON_HEDLEY_CONST
|
||||
#undef JSON_HEDLEY_CONSTEXPR
|
||||
#undef JSON_HEDLEY_CONST_CAST
|
||||
#undef JSON_HEDLEY_CPP_CAST
|
||||
#undef JSON_HEDLEY_CRAY_VERSION
|
||||
#undef JSON_HEDLEY_CRAY_VERSION_CHECK
|
||||
#undef JSON_HEDLEY_C_DECL
|
||||
#undef JSON_HEDLEY_DEPRECATED
|
||||
#undef JSON_HEDLEY_DEPRECATED_FOR
|
||||
#undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL
|
||||
#undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED
|
||||
#undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS
|
||||
#undef JSON_HEDLEY_DIAGNOSTIC_POP
|
||||
#undef JSON_HEDLEY_DIAGNOSTIC_PUSH
|
||||
#undef JSON_HEDLEY_DMC_VERSION
|
||||
#undef JSON_HEDLEY_DMC_VERSION_CHECK
|
||||
#undef JSON_HEDLEY_EMSCRIPTEN_VERSION
|
||||
#undef JSON_HEDLEY_EMSCRIPTEN_VERSION_CHECK
|
||||
#undef JSON_HEDLEY_END_C_DECLS
|
||||
#undef JSON_HEDLEY_FALL_THROUGH
|
||||
#undef JSON_HEDLEY_FLAGS
|
||||
#undef JSON_HEDLEY_FLAGS_CAST
|
||||
#undef JSON_HEDLEY_GCC_HAS_ATTRIBUTE
|
||||
#undef JSON_HEDLEY_GCC_HAS_BUILTIN
|
||||
#undef JSON_HEDLEY_GCC_HAS_CPP_ATTRIBUTE
|
||||
#undef JSON_HEDLEY_GCC_HAS_DECLSPEC_ATTRIBUTE
|
||||
#undef JSON_HEDLEY_GCC_HAS_EXTENSION
|
||||
#undef JSON_HEDLEY_GCC_HAS_FEATURE
|
||||
#undef JSON_HEDLEY_GCC_HAS_WARNING
|
||||
#undef JSON_HEDLEY_GCC_NOT_CLANG_VERSION_CHECK
|
||||
#undef JSON_HEDLEY_GCC_VERSION
|
||||
#undef JSON_HEDLEY_GCC_VERSION_CHECK
|
||||
#undef JSON_HEDLEY_GNUC_HAS_ATTRIBUTE
|
||||
#undef JSON_HEDLEY_GNUC_HAS_BUILTIN
|
||||
#undef JSON_HEDLEY_GNUC_HAS_CPP_ATTRIBUTE
|
||||
#undef JSON_HEDLEY_GNUC_HAS_DECLSPEC_ATTRIBUTE
|
||||
#undef JSON_HEDLEY_GNUC_HAS_EXTENSION
|
||||
#undef JSON_HEDLEY_GNUC_HAS_FEATURE
|
||||
#undef JSON_HEDLEY_GNUC_HAS_WARNING
|
||||
#undef JSON_HEDLEY_GNUC_VERSION
|
||||
#undef JSON_HEDLEY_GNUC_VERSION_CHECK
|
||||
#undef JSON_HEDLEY_HAS_ATTRIBUTE
|
||||
#undef JSON_HEDLEY_HAS_BUILTIN
|
||||
#undef JSON_HEDLEY_HAS_CPP_ATTRIBUTE
|
||||
#undef JSON_HEDLEY_HAS_DECLSPEC_ATTRIBUTE
|
||||
#undef JSON_HEDLEY_HAS_EXTENSION
|
||||
#undef JSON_HEDLEY_HAS_FEATURE
|
||||
#undef JSON_HEDLEY_HAS_WARNING
|
||||
#undef JSON_HEDLEY_IAR_VERSION
|
||||
#undef JSON_HEDLEY_IAR_VERSION_CHECK
|
||||
#undef JSON_HEDLEY_IBM_VERSION
|
||||
#undef JSON_HEDLEY_IBM_VERSION_CHECK
|
||||
#undef JSON_HEDLEY_IMPORT
|
||||
#undef JSON_HEDLEY_INLINE
|
||||
#undef JSON_HEDLEY_INTEL_VERSION
|
||||
#undef JSON_HEDLEY_INTEL_VERSION_CHECK
|
||||
#undef JSON_HEDLEY_IS_CONSTANT
|
||||
#undef JSON_HEDLEY_LIKELY
|
||||
#undef JSON_HEDLEY_MALLOC
|
||||
#undef JSON_HEDLEY_MESSAGE
|
||||
#undef JSON_HEDLEY_MSVC_VERSION
|
||||
#undef JSON_HEDLEY_MSVC_VERSION_CHECK
|
||||
#undef JSON_HEDLEY_NEVER_INLINE
|
||||
#undef JSON_HEDLEY_NON_NULL
|
||||
#undef JSON_HEDLEY_NO_RETURN
|
||||
#undef JSON_HEDLEY_NO_THROW
|
||||
#undef JSON_HEDLEY_PELLES_VERSION
|
||||
#undef JSON_HEDLEY_PELLES_VERSION_CHECK
|
||||
#undef JSON_HEDLEY_PGI_VERSION
|
||||
#undef JSON_HEDLEY_PGI_VERSION_CHECK
|
||||
#undef JSON_HEDLEY_PREDICT
|
||||
#undef JSON_HEDLEY_PRINTF_FORMAT
|
||||
#undef JSON_HEDLEY_PRIVATE
|
||||
#undef JSON_HEDLEY_PUBLIC
|
||||
#undef JSON_HEDLEY_PURE
|
||||
#undef JSON_HEDLEY_REINTERPRET_CAST
|
||||
#undef JSON_HEDLEY_REQUIRE
|
||||
#undef JSON_HEDLEY_REQUIRE_CONSTEXPR
|
||||
#undef JSON_HEDLEY_REQUIRE_MSG
|
||||
#undef JSON_HEDLEY_RESTRICT
|
||||
#undef JSON_HEDLEY_RETURNS_NON_NULL
|
||||
#undef JSON_HEDLEY_SENTINEL
|
||||
#undef JSON_HEDLEY_STATIC_ASSERT
|
||||
#undef JSON_HEDLEY_STATIC_CAST
|
||||
#undef JSON_HEDLEY_STRINGIFY
|
||||
#undef JSON_HEDLEY_STRINGIFY_EX
|
||||
#undef JSON_HEDLEY_SUNPRO_VERSION
|
||||
#undef JSON_HEDLEY_SUNPRO_VERSION_CHECK
|
||||
#undef JSON_HEDLEY_TINYC_VERSION
|
||||
#undef JSON_HEDLEY_TINYC_VERSION_CHECK
|
||||
#undef JSON_HEDLEY_TI_VERSION
|
||||
#undef JSON_HEDLEY_TI_VERSION_CHECK
|
||||
#undef JSON_HEDLEY_UNAVAILABLE
|
||||
#undef JSON_HEDLEY_UNLIKELY
|
||||
#undef JSON_HEDLEY_UNPREDICTABLE
|
||||
#undef JSON_HEDLEY_UNREACHABLE
|
||||
#undef JSON_HEDLEY_UNREACHABLE_RETURN
|
||||
#undef JSON_HEDLEY_VERSION
|
||||
#undef JSON_HEDLEY_VERSION_DECODE_MAJOR
|
||||
#undef JSON_HEDLEY_VERSION_DECODE_MINOR
|
||||
#undef JSON_HEDLEY_VERSION_DECODE_REVISION
|
||||
#undef JSON_HEDLEY_VERSION_ENCODE
|
||||
#undef JSON_HEDLEY_WARNING
|
||||
#undef JSON_HEDLEY_WARN_UNUSED_RESULT
|
|
@@ -1,6 +1,6 @@
 project('nlohmann_json',
     'cpp',
-    version : '3.6.1',
+    version : '3.7.0',
     license : 'MIT',
 )

|
(file diff suppressed because it is too large)
|
@@ -6,7 +6,7 @@ option(JSON_Coverage "Build test suite with coverage information" OFF)
 if(JSON_Sanitizer)
     message(STATUS "Building test suite with Clang sanitizer")
     if(NOT MSVC)
-        set(CMAKE_CXX_FLAGS "-g -O2 -fsanitize=address -fsanitize=undefined -fno-omit-frame-pointer")
+        set(CMAKE_CXX_FLAGS "-g -O2 -fsanitize=address -fsanitize=undefined -fsanitize=integer -fsanitize=nullability -fno-omit-frame-pointer")
     endif()
 endif()

@@ -22,7 +22,7 @@ if(JSON_NoExceptions)
     if(NOT MSVC)
         set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DJSON_NOEXCEPTION")
     endif()
-    set(CATCH_TEST_FILTER -e)
+    set(DOCTEST_TEST_FILTER --no-throw)
 endif()

 if(JSON_Coverage)
@ -45,31 +45,38 @@ if(JSON_Coverage)
|
|||
# add target to collect coverage information and generate HTML file
|
||||
# (filter script from https://stackoverflow.com/a/43726240/266378)
|
||||
add_custom_target(lcov_html
|
||||
COMMAND lcov --directory . --capture --output-file json.info --gcov-tool ${GCOV_BIN} --rc lcov_branch_coverage=1
|
||||
COMMAND lcov -e json.info ${SOURCE_FILES} --output-file json.info.filtered --rc lcov_branch_coverage=1
|
||||
COMMAND lcov --directory . --capture --output-file json.info --rc lcov_branch_coverage=1
|
||||
COMMAND lcov -e json.info ${SOURCE_FILES} --output-file json.info.filtered --gcov-tool ${GCOV_BIN} --rc lcov_branch_coverage=1
|
||||
COMMAND ${CMAKE_SOURCE_DIR}/test/thirdparty/imapdl/filterbr.py json.info.filtered > json.info.filtered.noexcept
|
||||
COMMAND genhtml --title "JSON for Modern C++" --legend --demangle-cpp --output-directory html --show-details --branch-coverage json.info.filtered.noexcept
|
||||
COMMENT "Generating HTML report test/html/index.html"
|
||||
)
|
||||
|
||||
add_custom_target(fastcov_html
|
||||
COMMAND ${CMAKE_SOURCE_DIR}/test/thirdparty/fastcov/fastcov.py --branch-coverage --lcov -o json.info --gcov ${GCOV_BIN} --compiler-directory ${CMAKE_BINARY_DIR} --source-files ${SOURCE_FILES}
|
||||
COMMAND ${CMAKE_SOURCE_DIR}/test/thirdparty/imapdl/filterbr.py json.info > json.info.noexcept
|
||||
COMMAND genhtml --title "JSON for Modern C++" --legend --demangle-cpp --output-directory html --show-details --branch-coverage json.info.noexcept
|
||||
COMMENT "Generating HTML report test/html/index.html"
|
||||
)
|
||||
endif()
|
||||
|
||||
#############################################################################
|
||||
# Catch library with the main function to speed up build
|
||||
# doctest library with the main function to speed up build
|
||||
#############################################################################
|
||||
|
||||
add_library(catch_main OBJECT
|
||||
add_library(doctest_main OBJECT
|
||||
"src/unit.cpp"
|
||||
)
|
||||
set_target_properties(catch_main PROPERTIES
|
||||
set_target_properties(doctest_main PROPERTIES
|
||||
COMPILE_DEFINITIONS "$<$<CXX_COMPILER_ID:MSVC>:_SCL_SECURE_NO_WARNINGS>"
|
||||
COMPILE_OPTIONS "$<$<CXX_COMPILER_ID:MSVC>:/EHsc;$<$<CONFIG:Release>:/Od>>"
|
||||
)
|
||||
if (${CMAKE_VERSION} VERSION_LESS "3.8.0")
|
||||
target_compile_features(catch_main PUBLIC cxx_range_for)
|
||||
target_compile_features(doctest_main PUBLIC cxx_range_for)
|
||||
else()
|
||||
target_compile_features(catch_main PUBLIC cxx_std_11)
|
||||
target_compile_features(doctest_main PUBLIC cxx_std_11)
|
||||
endif()
|
||||
target_include_directories(catch_main PRIVATE "thirdparty/catch")
|
||||
target_include_directories(doctest_main PRIVATE "thirdparty/doctest")
|
||||
|
||||
# https://stackoverflow.com/questions/2368811/how-to-set-warning-level-in-cmake
|
||||
if(MSVC)
|
||||
|
@ -99,10 +106,9 @@ foreach(file ${files})
|
|||
get_filename_component(file_basename ${file} NAME_WE)
|
||||
string(REGEX REPLACE "unit-([^$]+)" "test-\\1" testcase ${file_basename})
|
||||
|
||||
add_executable(${testcase} $<TARGET_OBJECTS:catch_main> ${file})
|
||||
add_executable(${testcase} $<TARGET_OBJECTS:doctest_main> ${file})
|
||||
target_compile_definitions(${testcase} PRIVATE
|
||||
CATCH_CONFIG_FAST_COMPILE
|
||||
$<$<CXX_COMPILER_ID:MSVC>:_SCL_SECURE_NO_WARNINGS>
|
||||
DOCTEST_CONFIG_SUPER_FAST_ASSERTS
|
||||
)
|
||||
target_compile_options(${testcase} PRIVATE
|
||||
$<$<CXX_COMPILER_ID:MSVC>:/EHsc;$<$<CONFIG:Release>:/Od>>
|
||||
|
@ -110,26 +116,26 @@ foreach(file ${files})
|
|||
$<$<CXX_COMPILER_ID:GNU>:-Wno-deprecated-declarations>
|
||||
)
|
||||
target_include_directories(${testcase} PRIVATE
|
||||
thirdparty/catch
|
||||
thirdparty/doctest
|
||||
thirdparty/fifo_map
|
||||
)
|
||||
target_link_libraries(${testcase} ${NLOHMANN_JSON_TARGET_NAME})
|
||||
|
||||
add_test(NAME "${testcase}_default"
|
||||
COMMAND ${testcase} ${CATCH_TEST_FILTER}
|
||||
COMMAND ${testcase} ${DOCTEST_TEST_FILTER}
|
||||
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
|
||||
)
|
||||
set_tests_properties("${testcase}_default" PROPERTIES LABELS "default")
|
||||
|
||||
add_test(NAME "${testcase}_all"
|
||||
COMMAND ${testcase} ${CATCH_TEST_FILTER} "*"
|
||||
COMMAND ${testcase} ${DOCTEST_TEST_FILTER} --no-skip
|
||||
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
|
||||
)
|
||||
set_tests_properties("${testcase}_all" PROPERTIES LABELS "all")
|
||||
|
||||
if(JSON_Valgrind)
|
||||
add_test(NAME "${testcase}_valgrind"
|
||||
COMMAND ${memcheck_command} ${CMAKE_CURRENT_BINARY_DIR}/${testcase} ${CATCH_TEST_FILTER}
|
||||
COMMAND ${memcheck_command} ${CMAKE_CURRENT_BINARY_DIR}/${testcase} ${DOCTEST_TEST_FILTER}
|
||||
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
|
||||
)
|
||||
set_tests_properties("${testcase}_valgrind" PROPERTIES LABELS "valgrind")
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
|
||||
# additional flags
|
||||
CXXFLAGS += -std=c++11 -Wall -Wextra -pedantic -Wcast-align -Wcast-qual -Wno-ctor-dtor-privacy -Wdisabled-optimization -Wformat=2 -Winit-self -Wmissing-declarations -Wmissing-include-dirs -Wold-style-cast -Woverloaded-virtual -Wredundant-decls -Wshadow -Wsign-conversion -Wsign-promo -Wstrict-overflow=5 -Wswitch -Wundef -Wno-unused -Wnon-virtual-dtor -Wreorder -Wdeprecated -Wno-float-equal
|
||||
CPPFLAGS += -I ../single_include -I . -I thirdparty/catch -I thirdparty/fifo_map -DCATCH_CONFIG_FAST_COMPILE
|
||||
CPPFLAGS += -I ../single_include -I . -I thirdparty/doctest -I thirdparty/fifo_map -DDOCTEST_CONFIG_SUPER_FAST_ASSERTS
|
||||
|
||||
SOURCES = src/unit.cpp \
|
||||
src/unit-algorithms.cpp \
|
||||
|
@ -63,11 +63,11 @@ clean:
|
|||
# single test file
|
||||
##############################################################################
|
||||
|
||||
json_unit: $(OBJECTS) ../single_include/nlohmann/json.hpp thirdparty/catch/catch.hpp
|
||||
json_unit: $(OBJECTS) ../single_include/nlohmann/json.hpp thirdparty/doctest/doctest.h
|
||||
@echo "[CXXLD] $@"
|
||||
@$(CXX) $(CXXFLAGS) $(LDFLAGS) $(OBJECTS) -o $@
|
||||
|
||||
%.o: %.cpp ../single_include/nlohmann/json.hpp thirdparty/catch/catch.hpp
|
||||
%.o: %.cpp ../single_include/nlohmann/json.hpp thirdparty/doctest/doctest.h
|
||||
@echo "[CXX] $@"
|
||||
@$(CXX) $(CXXFLAGS) $(CPPFLAGS) -c $< -o $@
|
||||
|
||||
|
@ -76,7 +76,7 @@ json_unit: $(OBJECTS) ../single_include/nlohmann/json.hpp thirdparty/catch/catch
|
|||
# individual test cases
|
||||
##############################################################################
|
||||
|
||||
test-%: src/unit-%.o src/unit.o ../single_include/nlohmann/json.hpp thirdparty/catch/catch.hpp
|
||||
test-%: src/unit-%.o src/unit.o ../single_include/nlohmann/json.hpp thirdparty/doctest/doctest.h
|
||||
@echo "[CXXLD] $@"
|
||||
@$(CXX) $(CXXFLAGS) $(CPPFLAGS) $(LDFLAGS) $< src/unit.o -o $@
|
||||
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/*
|
||||
__ _____ _____ _____
|
||||
__| | __| | | | JSON for Modern C++ (fuzz test support)
|
||||
| | |__ | | | | | | version 3.6.1
|
||||
| | |__ | | | | | | version 3.7.0
|
||||
|_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
|
||||
This file implements a driver for American Fuzzy Lop (afl-fuzz). It relies on
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/*
|
||||
__ _____ _____ _____
|
||||
__| | __| | | | JSON for Modern C++ (fuzz test support)
|
||||
| | |__ | | | | | | version 3.6.1
|
||||
| | |__ | | | | | | version 3.7.0
|
||||
|_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
|
||||
This file implements a parser test suitable for fuzz testing. Given a byte
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/*
|
||||
__ _____ _____ _____
|
||||
__| | __| | | | JSON for Modern C++ (fuzz test support)
|
||||
| | |__ | | | | | | version 3.6.1
|
||||
| | |__ | | | | | | version 3.7.0
|
||||
|_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
|
||||
This file implements a parser test suitable for fuzz testing. Given a byte
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/*
|
||||
__ _____ _____ _____
|
||||
__| | __| | | | JSON for Modern C++ (fuzz test support)
|
||||
| | |__ | | | | | | version 3.6.1
|
||||
| | |__ | | | | | | version 3.7.0
|
||||
|_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
|
||||
This file implements a parser test suitable for fuzz testing. Given a byte
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/*
|
||||
__ _____ _____ _____
|
||||
__| | __| | | | JSON for Modern C++ (fuzz test support)
|
||||
| | |__ | | | | | | version 3.6.1
|
||||
| | |__ | | | | | | version 3.7.0
|
||||
|_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
|
||||
This file implements a parser test suitable for fuzz testing. Given a byte
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/*
|
||||
__ _____ _____ _____
|
||||
__| | __| | | | JSON for Modern C++ (fuzz test support)
|
||||
| | |__ | | | | | | version 3.6.1
|
||||
| | |__ | | | | | | version 3.7.0
|
||||
|_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
|
||||
This file implements a parser test suitable for fuzz testing. Given a byte
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/*
|
||||
__ _____ _____ _____
|
||||
__| | __| | | | JSON for Modern C++ (test suite)
|
||||
| | |__ | | | | | | version 3.6.1
|
||||
| | |__ | | | | | | version 3.7.0
|
||||
|_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
|
||||
Licensed under the MIT License <http://opensource.org/licenses/MIT>.
|
||||
|
@ -27,7 +27,7 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|||
SOFTWARE.
|
||||
*/
|
||||
|
||||
#include "catch.hpp"
|
||||
#include "doctest_compatibility.h"
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
using nlohmann::json;
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/*
|
||||
__ _____ _____ _____
|
||||
__| | __| | | | JSON for Modern C++ (test suite)
|
||||
| | |__ | | | | | | version 3.6.1
|
||||
| | |__ | | | | | | version 3.7.0
|
||||
|_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
|
||||
Licensed under the MIT License <http://opensource.org/licenses/MIT>.
|
||||
|
@ -27,14 +27,16 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|||
SOFTWARE.
|
||||
*/
|
||||
|
||||
#include "catch.hpp"
|
||||
#include "doctest_compatibility.h"
|
||||
|
||||
#define private public
|
||||
#include <nlohmann/json.hpp>
|
||||
using nlohmann::json;
|
||||
#undef private
|
||||
|
||||
namespace
|
||||
{
|
||||
// special test case to check if memory is leaked if constructor throws
|
||||
|
||||
template<class T>
|
||||
struct bad_allocator : std::allocator<T>
|
||||
{
|
||||
|
@ -44,6 +46,7 @@ struct bad_allocator : std::allocator<T>
|
|||
throw std::bad_alloc();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
TEST_CASE("bad_alloc")
|
||||
{
|
||||
|
@ -64,13 +67,17 @@ TEST_CASE("bad_alloc")
|
|||
}
|
||||
}
|
||||
|
||||
static bool next_construct_fails = false;
|
||||
static bool next_destroy_fails = false;
|
||||
static bool next_deallocate_fails = false;
|
||||
namespace
|
||||
{
|
||||
bool next_construct_fails = false;
|
||||
bool next_destroy_fails = false;
|
||||
bool next_deallocate_fails = false;
|
||||
|
||||
template<class T>
|
||||
struct my_allocator : std::allocator<T>
|
||||
{
|
||||
using std::allocator<T>::allocator;
|
||||
|
||||
template<class... Args>
|
||||
void construct(T* p, Args&& ... args)
|
||||
{
|
||||
|
@ -127,6 +134,7 @@ void my_allocator_clean_up(T* p)
|
|||
alloc.destroy(p);
|
||||
alloc.deallocate(p, 1);
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE("controlled bad_alloc")
|
||||
{
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/*
|
||||
__ _____ _____ _____
|
||||
__| | __| | | | JSON for Modern C++ (test suite)
|
||||
| | |__ | | | | | | version 3.6.1
|
||||
| | |__ | | | | | | version 3.7.0
|
||||
|_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
|
||||
Licensed under the MIT License <http://opensource.org/licenses/MIT>.
|
||||
|
@ -27,9 +27,10 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|||
SOFTWARE.
|
||||
*/
|
||||
|
||||
#include "catch.hpp"
|
||||
#include "doctest_compatibility.h"
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
|
||||
#include <string>
|
||||
#include <utility>
|
||||
|
||||
|
@ -38,7 +39,6 @@ SOFTWARE.
|
|||
class alt_string;
|
||||
bool operator<(const char* op1, const alt_string& op2);
|
||||
|
||||
|
||||
/*
|
||||
* This is virtually a string class.
|
||||
* It covers std::string under the hood.
|
||||
|
@ -154,7 +154,6 @@ class alt_string
|
|||
friend bool ::operator<(const char*, const alt_string&);
|
||||
};
|
||||
|
||||
|
||||
using alt_json = nlohmann::basic_json <
|
||||
std::map,
|
||||
std::vector,
|
||||
|
@ -172,8 +171,6 @@ bool operator<(const char* op1, const alt_string& op2)
|
|||
return op1 < op2.str_impl;
|
||||
}
|
||||
|
||||
|
||||
|
||||
TEST_CASE("alternative string type")
|
||||
{
|
||||
SECTION("dump")
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/*
|
||||
__ _____ _____ _____
|
||||
__| | __| | | | JSON for Modern C++ (test suite)
|
||||
| | |__ | | | | | | version 3.6.1
|
||||
| | |__ | | | | | | version 3.7.0
|
||||
|_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
|
||||
Licensed under the MIT License <http://opensource.org/licenses/MIT>.
|
||||
|
@ -27,11 +27,13 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|||
SOFTWARE.
|
||||
*/
|
||||
|
||||
#include "catch.hpp"
|
||||
#include "doctest_compatibility.h"
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
using nlohmann::json;
|
||||
|
||||
#include <fstream>
|
||||
#include <sstream>
|
||||
|
||||
TEST_CASE("BSON")
|
||||
{
|
||||
|
@ -110,8 +112,9 @@ TEST_CASE("BSON")
|
|||
0x00,
|
||||
0x00, 0x00, 0x00, 0x80
|
||||
};
|
||||
CHECK_THROWS_AS(json::from_bson(v), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_bson(v), "[json.exception.parse_error.112] parse error at byte 10: syntax error while parsing BSON string: string length must be at least 1, is -2147483648");
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_bson(v), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_bson(v), "[json.exception.parse_error.112] parse error at byte 10: syntax error while parsing BSON string: string length must be at least 1, is -2147483648");
|
||||
}
|
||||
|
||||
SECTION("objects")
|
||||
|
@ -606,6 +609,8 @@ TEST_CASE("BSON input/output_adapters")
|
|||
}
|
||||
}
|
||||
|
||||
namespace
|
||||
{
|
||||
class SaxCountdown
|
||||
{
|
||||
public:
|
||||
|
@ -675,6 +680,7 @@ class SaxCountdown
|
|||
private:
|
||||
int events_left = 0;
|
||||
};
|
||||
}
|
||||
|
||||
TEST_CASE("Incomplete BSON Input")
|
||||
{
|
||||
|
@ -687,8 +693,9 @@ TEST_CASE("Incomplete BSON Input")
|
|||
'e', 'n', 't' // unexpected EOF
|
||||
};
|
||||
|
||||
CHECK_THROWS_AS(json::from_bson(incomplete_bson), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_bson(incomplete_bson),
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_bson(incomplete_bson), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_bson(incomplete_bson),
|
||||
"[json.exception.parse_error.110] parse error at byte 9: syntax error while parsing BSON cstring: unexpected end of input");
|
||||
|
||||
CHECK(json::from_bson(incomplete_bson, true, false).is_discarded());
|
||||
|
@ -705,8 +712,9 @@ TEST_CASE("Incomplete BSON Input")
|
|||
0x08, // entry: boolean, unexpected EOF
|
||||
};
|
||||
|
||||
CHECK_THROWS_AS(json::from_bson(incomplete_bson), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_bson(incomplete_bson),
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_bson(incomplete_bson), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_bson(incomplete_bson),
|
||||
"[json.exception.parse_error.110] parse error at byte 6: syntax error while parsing BSON cstring: unexpected end of input");
|
||||
CHECK(json::from_bson(incomplete_bson, true, false).is_discarded());
|
||||
|
||||
|
@ -728,8 +736,9 @@ TEST_CASE("Incomplete BSON Input")
|
|||
// missing input data...
|
||||
};
|
||||
|
||||
CHECK_THROWS_AS(json::from_bson(incomplete_bson), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_bson(incomplete_bson),
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_bson(incomplete_bson), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_bson(incomplete_bson),
|
||||
"[json.exception.parse_error.110] parse error at byte 28: syntax error while parsing BSON element list: unexpected end of input");
|
||||
CHECK(json::from_bson(incomplete_bson, true, false).is_discarded());
|
||||
|
||||
|
@ -744,8 +753,9 @@ TEST_CASE("Incomplete BSON Input")
|
|||
0x0D, 0x00, // size (incomplete), unexpected EOF
|
||||
};
|
||||
|
||||
CHECK_THROWS_AS(json::from_bson(incomplete_bson), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_bson(incomplete_bson),
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_bson(incomplete_bson), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_bson(incomplete_bson),
|
||||
"[json.exception.parse_error.110] parse error at byte 3: syntax error while parsing BSON number: unexpected end of input");
|
||||
CHECK(json::from_bson(incomplete_bson, true, false).is_discarded());
|
||||
|
||||
|
@ -786,8 +796,9 @@ TEST_CASE("Unsupported BSON input")
|
|||
0x00 // end marker
|
||||
};
|
||||
|
||||
CHECK_THROWS_AS(json::from_bson(bson), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_bson(bson),
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_bson(bson), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_bson(bson),
|
||||
"[json.exception.parse_error.114] parse error at byte 5: Unsupported BSON record type 0xFF");
|
||||
CHECK(json::from_bson(bson, true, false).is_discarded());
|
||||
|
||||
|
@ -1138,7 +1149,7 @@ TEST_CASE("BSON numerical data")
|
|||
};
|
||||
|
||||
CHECK_THROWS_AS(json::to_bson(j), json::out_of_range&);
|
||||
CHECK_THROWS_WITH(json::to_bson(j), "[json.exception.out_of_range.407] integer number " + std::to_string(i) + " cannot be represented by BSON as it does not fit int64");
|
||||
CHECK_THROWS_WITH_STD_STR(json::to_bson(j), "[json.exception.out_of_range.407] integer number " + std::to_string(i) + " cannot be represented by BSON as it does not fit int64");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1146,7 +1157,7 @@ TEST_CASE("BSON numerical data")
|
|||
}
|
||||
}
|
||||
|
||||
TEST_CASE("BSON roundtrips", "[hide]")
|
||||
TEST_CASE("BSON roundtrips" * doctest::skip())
|
||||
{
|
||||
SECTION("reference files")
|
||||
{
|
||||
|
@ -1161,8 +1172,8 @@ TEST_CASE("BSON roundtrips", "[hide]")
|
|||
{
|
||||
CAPTURE(filename)
|
||||
|
||||
SECTION(filename + ": std::vector<uint8_t>")
|
||||
{
|
||||
INFO_WITH_TEMP(filename + ": std::vector<uint8_t>");
|
||||
// parse JSON file
|
||||
std::ifstream f_json(filename);
|
||||
json j1 = json::parse(f_json);
|
||||
|
@ -1179,8 +1190,8 @@ TEST_CASE("BSON roundtrips", "[hide]")
|
|||
CHECK(j1 == j2);
|
||||
}
|
||||
|
||||
SECTION(filename + ": std::ifstream")
|
||||
{
|
||||
INFO_WITH_TEMP(filename + ": std::ifstream");
|
||||
// parse JSON file
|
||||
std::ifstream f_json(filename);
|
||||
json j1 = json::parse(f_json);
|
||||
|
@ -1194,8 +1205,8 @@ TEST_CASE("BSON roundtrips", "[hide]")
|
|||
CHECK(j1 == j2);
|
||||
}
|
||||
|
||||
SECTION(filename + ": uint8_t* and size")
|
||||
{
|
||||
INFO_WITH_TEMP(filename + ": uint8_t* and size");
|
||||
// parse JSON file
|
||||
std::ifstream f_json(filename);
|
||||
json j1 = json::parse(f_json);
|
||||
|
@ -1212,8 +1223,8 @@ TEST_CASE("BSON roundtrips", "[hide]")
|
|||
CHECK(j1 == j2);
|
||||
}
|
||||
|
||||
SECTION(filename + ": output to output adapters")
|
||||
{
|
||||
INFO_WITH_TEMP(filename + ": output to output adapters");
|
||||
// parse JSON file
|
||||
std::ifstream f_json(filename);
|
||||
json j1 = json::parse(f_json);
|
||||
|
@ -1224,8 +1235,8 @@ TEST_CASE("BSON roundtrips", "[hide]")
|
|||
(std::istreambuf_iterator<char>(f_bson)),
|
||||
std::istreambuf_iterator<char>());
|
||||
|
||||
SECTION(filename + ": output adapters: std::vector<uint8_t>")
|
||||
{
|
||||
INFO_WITH_TEMP(filename + ": output adapters: std::vector<uint8_t>");
|
||||
std::vector<uint8_t> vec;
|
||||
json::to_bson(j1, vec);
@ -1,7 +1,7 @@
|
|||
/*
|
||||
__ _____ _____ _____
|
||||
__| | __| | | | JSON for Modern C++ (test suite)
|
||||
| | |__ | | | | | | version 3.6.1
|
||||
| | |__ | | | | | | version 3.7.0
|
||||
|_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
|
||||
Licensed under the MIT License <http://opensource.org/licenses/MIT>.
|
||||
|
@ -27,7 +27,7 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|||
SOFTWARE.
|
||||
*/
|
||||
|
||||
#include "catch.hpp"
|
||||
#include "doctest_compatibility.h"
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
using nlohmann::json;
@ -1,7 +1,7 @@
|
|||
/*
|
||||
__ _____ _____ _____
|
||||
__| | __| | | | JSON for Modern C++ (test suite)
|
||||
| | |__ | | | | | | version 3.6.1
|
||||
| | |__ | | | | | | version 3.7.0
|
||||
|_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
|
||||
Licensed under the MIT License <http://opensource.org/licenses/MIT>.
|
||||
|
@ -27,13 +27,19 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|||
SOFTWARE.
|
||||
*/
|
||||
|
||||
#include "catch.hpp"
|
||||
#include "doctest_compatibility.h"
|
||||
DOCTEST_GCC_SUPPRESS_WARNING("-Wfloat-equal")
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
using nlohmann::json;
|
||||
|
||||
#include <fstream>
|
||||
#include <sstream>
|
||||
#include <iomanip>
|
||||
#include <set>
|
||||
|
||||
namespace
|
||||
{
|
||||
class SaxCountdown
|
||||
{
|
||||
public:
|
||||
|
@ -103,6 +109,7 @@ class SaxCountdown
|
|||
private:
|
||||
int events_left = 0;
|
||||
};
|
||||
}
|
||||
|
||||
TEST_CASE("CBOR")
|
||||
{
|
||||
|
@ -829,15 +836,17 @@ TEST_CASE("CBOR")
|
|||
{
|
||||
SECTION("no byte follows")
|
||||
{
|
||||
CHECK_THROWS_AS(json::from_cbor(std::vector<uint8_t>({0xf9})), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_cbor(std::vector<uint8_t>({0xf9})),
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(std::vector<uint8_t>({0xf9})), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(std::vector<uint8_t>({0xf9})),
|
||||
"[json.exception.parse_error.110] parse error at byte 2: syntax error while parsing CBOR number: unexpected end of input");
|
||||
CHECK(json::from_cbor(std::vector<uint8_t>({0xf9}), true, false).is_discarded());
|
||||
}
|
||||
SECTION("only one byte follows")
|
||||
{
|
||||
CHECK_THROWS_AS(json::from_cbor(std::vector<uint8_t>({0xf9, 0x7c})), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_cbor(std::vector<uint8_t>({0xf9, 0x7c})),
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(std::vector<uint8_t>({0xf9, 0x7c})), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(std::vector<uint8_t>({0xf9, 0x7c})),
|
||||
"[json.exception.parse_error.110] parse error at byte 3: syntax error while parsing CBOR number: unexpected end of input");
|
||||
CHECK(json::from_cbor(std::vector<uint8_t>({0xf9, 0x7c}), true, false).is_discarded());
|
||||
}
|
||||
|
@ -1312,86 +1321,88 @@ TEST_CASE("CBOR")
|
|||
{
|
||||
SECTION("empty byte vector")
|
||||
{
|
||||
CHECK_THROWS_AS(json::from_cbor(std::vector<uint8_t>()), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_cbor(std::vector<uint8_t>()),
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(std::vector<uint8_t>()), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(std::vector<uint8_t>()),
|
||||
"[json.exception.parse_error.110] parse error at byte 1: syntax error while parsing CBOR value: unexpected end of input");
|
||||
CHECK(json::from_cbor(std::vector<uint8_t>(), true, false).is_discarded());
|
||||
}
|
||||
|
||||
SECTION("too short byte vector")
|
||||
{
|
||||
CHECK_THROWS_AS(json::from_cbor(std::vector<uint8_t>({0x18})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_cbor(std::vector<uint8_t>({0x19})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_cbor(std::vector<uint8_t>({0x19, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_cbor(std::vector<uint8_t>({0x1a})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_cbor(std::vector<uint8_t>({0x1a, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_cbor(std::vector<uint8_t>({0x1a, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_cbor(std::vector<uint8_t>({0x1a, 0x00, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_cbor(std::vector<uint8_t>({0x1b})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_cbor(std::vector<uint8_t>({0x1b, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_cbor(std::vector<uint8_t>({0x1b, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_cbor(std::vector<uint8_t>({0x1b, 0x00, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_cbor(std::vector<uint8_t>({0x1b, 0x00, 0x00, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_cbor(std::vector<uint8_t>({0x1b, 0x00, 0x00, 0x00, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_cbor(std::vector<uint8_t>({0x1b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_cbor(std::vector<uint8_t>({0x1b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_cbor(std::vector<uint8_t>({0x62})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_cbor(std::vector<uint8_t>({0x62, 0x60})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_cbor(std::vector<uint8_t>({0x7F})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_cbor(std::vector<uint8_t>({0x7F, 0x60})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_cbor(std::vector<uint8_t>({0x82, 0x01})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_cbor(std::vector<uint8_t>({0x9F, 0x01})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_cbor(std::vector<uint8_t>({0xBF, 0x61, 0x61, 0xF5})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_cbor(std::vector<uint8_t>({0xA1, 0x61, 0X61})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_cbor(std::vector<uint8_t>({0xBF, 0x61, 0X61})), json::parse_error&);
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(std::vector<uint8_t>({0x18})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(std::vector<uint8_t>({0x19})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(std::vector<uint8_t>({0x19, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(std::vector<uint8_t>({0x1a})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(std::vector<uint8_t>({0x1a, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(std::vector<uint8_t>({0x1a, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(std::vector<uint8_t>({0x1a, 0x00, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(std::vector<uint8_t>({0x1b})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(std::vector<uint8_t>({0x1b, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(std::vector<uint8_t>({0x1b, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(std::vector<uint8_t>({0x1b, 0x00, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(std::vector<uint8_t>({0x1b, 0x00, 0x00, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(std::vector<uint8_t>({0x1b, 0x00, 0x00, 0x00, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(std::vector<uint8_t>({0x1b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(std::vector<uint8_t>({0x1b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(std::vector<uint8_t>({0x62})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(std::vector<uint8_t>({0x62, 0x60})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(std::vector<uint8_t>({0x7F})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(std::vector<uint8_t>({0x7F, 0x60})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(std::vector<uint8_t>({0x82, 0x01})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(std::vector<uint8_t>({0x9F, 0x01})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(std::vector<uint8_t>({0xBF, 0x61, 0x61, 0xF5})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(std::vector<uint8_t>({0xA1, 0x61, 0X61})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(std::vector<uint8_t>({0xBF, 0x61, 0X61})), json::parse_error&);
|
||||
|
||||
CHECK_THROWS_WITH(json::from_cbor(std::vector<uint8_t>({0x18})),
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(std::vector<uint8_t>({0x18})),
|
||||
"[json.exception.parse_error.110] parse error at byte 2: syntax error while parsing CBOR number: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_cbor(std::vector<uint8_t>({0x19})),
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(std::vector<uint8_t>({0x19})),
|
||||
"[json.exception.parse_error.110] parse error at byte 2: syntax error while parsing CBOR number: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_cbor(std::vector<uint8_t>({0x19, 0x00})),
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(std::vector<uint8_t>({0x19, 0x00})),
|
||||
"[json.exception.parse_error.110] parse error at byte 3: syntax error while parsing CBOR number: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_cbor(std::vector<uint8_t>({0x1a})),
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(std::vector<uint8_t>({0x1a})),
|
||||
"[json.exception.parse_error.110] parse error at byte 2: syntax error while parsing CBOR number: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_cbor(std::vector<uint8_t>({0x1a, 0x00})),
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(std::vector<uint8_t>({0x1a, 0x00})),
|
||||
"[json.exception.parse_error.110] parse error at byte 3: syntax error while parsing CBOR number: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_cbor(std::vector<uint8_t>({0x1a, 0x00, 0x00})),
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(std::vector<uint8_t>({0x1a, 0x00, 0x00})),
|
||||
"[json.exception.parse_error.110] parse error at byte 4: syntax error while parsing CBOR number: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_cbor(std::vector<uint8_t>({0x1a, 0x00, 0x00, 0x00})),
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(std::vector<uint8_t>({0x1a, 0x00, 0x00, 0x00})),
|
||||
"[json.exception.parse_error.110] parse error at byte 5: syntax error while parsing CBOR number: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_cbor(std::vector<uint8_t>({0x1b})),
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(std::vector<uint8_t>({0x1b})),
|
||||
"[json.exception.parse_error.110] parse error at byte 2: syntax error while parsing CBOR number: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_cbor(std::vector<uint8_t>({0x1b, 0x00})),
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(std::vector<uint8_t>({0x1b, 0x00})),
|
||||
"[json.exception.parse_error.110] parse error at byte 3: syntax error while parsing CBOR number: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_cbor(std::vector<uint8_t>({0x1b, 0x00, 0x00})),
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(std::vector<uint8_t>({0x1b, 0x00, 0x00})),
|
||||
"[json.exception.parse_error.110] parse error at byte 4: syntax error while parsing CBOR number: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_cbor(std::vector<uint8_t>({0x1b, 0x00, 0x00, 0x00})),
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(std::vector<uint8_t>({0x1b, 0x00, 0x00, 0x00})),
|
||||
"[json.exception.parse_error.110] parse error at byte 5: syntax error while parsing CBOR number: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_cbor(std::vector<uint8_t>({0x1b, 0x00, 0x00, 0x00, 0x00})),
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(std::vector<uint8_t>({0x1b, 0x00, 0x00, 0x00, 0x00})),
|
||||
"[json.exception.parse_error.110] parse error at byte 6: syntax error while parsing CBOR number: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_cbor(std::vector<uint8_t>({0x1b, 0x00, 0x00, 0x00, 0x00, 0x00})),
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(std::vector<uint8_t>({0x1b, 0x00, 0x00, 0x00, 0x00, 0x00})),
|
||||
"[json.exception.parse_error.110] parse error at byte 7: syntax error while parsing CBOR number: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_cbor(std::vector<uint8_t>({0x1b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00})),
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(std::vector<uint8_t>({0x1b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00})),
|
||||
"[json.exception.parse_error.110] parse error at byte 8: syntax error while parsing CBOR number: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_cbor(std::vector<uint8_t>({0x1b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00})),
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(std::vector<uint8_t>({0x1b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00})),
|
||||
"[json.exception.parse_error.110] parse error at byte 9: syntax error while parsing CBOR number: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_cbor(std::vector<uint8_t>({0x62})),
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(std::vector<uint8_t>({0x62})),
|
||||
"[json.exception.parse_error.110] parse error at byte 2: syntax error while parsing CBOR string: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_cbor(std::vector<uint8_t>({0x62, 0x60})),
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(std::vector<uint8_t>({0x62, 0x60})),
|
||||
"[json.exception.parse_error.110] parse error at byte 3: syntax error while parsing CBOR string: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_cbor(std::vector<uint8_t>({0x7F})),
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(std::vector<uint8_t>({0x7F})),
|
||||
"[json.exception.parse_error.110] parse error at byte 2: syntax error while parsing CBOR string: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_cbor(std::vector<uint8_t>({0x7F, 0x60})),
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(std::vector<uint8_t>({0x7F, 0x60})),
|
||||
"[json.exception.parse_error.110] parse error at byte 3: syntax error while parsing CBOR string: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_cbor(std::vector<uint8_t>({0x82, 0x01})),
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(std::vector<uint8_t>({0x82, 0x01})),
|
||||
"[json.exception.parse_error.110] parse error at byte 3: syntax error while parsing CBOR value: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_cbor(std::vector<uint8_t>({0x9F, 0x01})),
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(std::vector<uint8_t>({0x9F, 0x01})),
|
||||
"[json.exception.parse_error.110] parse error at byte 3: syntax error while parsing CBOR value: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_cbor(std::vector<uint8_t>({0xBF, 0x61, 0x61, 0xF5})),
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(std::vector<uint8_t>({0xBF, 0x61, 0x61, 0xF5})),
|
||||
"[json.exception.parse_error.110] parse error at byte 5: syntax error while parsing CBOR string: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_cbor(std::vector<uint8_t>({0xA1, 0x61, 0x61})),
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(std::vector<uint8_t>({0xA1, 0x61, 0x61})),
|
||||
"[json.exception.parse_error.110] parse error at byte 4: syntax error while parsing CBOR value: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_cbor(std::vector<uint8_t>({0xBF, 0x61, 0x61})),
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(std::vector<uint8_t>({0xBF, 0x61, 0x61})),
|
||||
"[json.exception.parse_error.110] parse error at byte 4: syntax error while parsing CBOR value: unexpected end of input");
|
||||
|
||||
CHECK(json::from_cbor(std::vector<uint8_t>({0x18}), true, false).is_discarded());
|
||||
|
@ -1424,13 +1435,14 @@ TEST_CASE("CBOR")
|
|||
{
|
||||
SECTION("concrete examples")
|
||||
{
|
||||
CHECK_THROWS_AS(json::from_cbor(std::vector<uint8_t>({0x1c})), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_cbor(std::vector<uint8_t>({0x1c})),
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(std::vector<uint8_t>({0x1c})), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(std::vector<uint8_t>({0x1c})),
|
||||
"[json.exception.parse_error.112] parse error at byte 1: syntax error while parsing CBOR value: invalid byte: 0x1C");
|
||||
CHECK(json::from_cbor(std::vector<uint8_t>({0x1c}), true, false).is_discarded());
|
||||
|
||||
CHECK_THROWS_AS(json::from_cbor(std::vector<uint8_t>({0xf8})), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_cbor(std::vector<uint8_t>({0xf8})),
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(std::vector<uint8_t>({0xf8})), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(std::vector<uint8_t>({0xf8})),
|
||||
"[json.exception.parse_error.112] parse error at byte 1: syntax error while parsing CBOR value: invalid byte: 0xF8");
|
||||
CHECK(json::from_cbor(std::vector<uint8_t>({0xf8}), true, false).is_discarded());
|
||||
}
|
||||
|
@ -1481,7 +1493,8 @@ TEST_CASE("CBOR")
|
|||
0xf8
|
||||
})
|
||||
{
|
||||
CHECK_THROWS_AS(json::from_cbor(std::vector<uint8_t>({static_cast<uint8_t>(byte)})), json::parse_error&);
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(std::vector<uint8_t>({static_cast<uint8_t>(byte)})), json::parse_error&);
|
||||
CHECK(json::from_cbor(std::vector<uint8_t>({static_cast<uint8_t>(byte)}), true, false).is_discarded());
|
||||
}
|
||||
}
|
||||
|
@ -1489,8 +1502,9 @@ TEST_CASE("CBOR")
|
|||
|
||||
SECTION("invalid string in map")
|
||||
{
|
||||
CHECK_THROWS_AS(json::from_cbor(std::vector<uint8_t>({0xa1, 0xff, 0x01})), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_cbor(std::vector<uint8_t>({0xa1, 0xff, 0x01})),
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(std::vector<uint8_t>({0xa1, 0xff, 0x01})), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(std::vector<uint8_t>({0xa1, 0xff, 0x01})),
|
||||
"[json.exception.parse_error.113] parse error at byte 2: syntax error while parsing CBOR string: expected length specification (0x60-0x7B) or indefinite string type (0x7F); last byte: 0xFF");
|
||||
CHECK(json::from_cbor(std::vector<uint8_t>({0xa1, 0xff, 0x01}), true, false).is_discarded());
|
||||
}
|
||||
|
@ -1507,8 +1521,9 @@ TEST_CASE("CBOR")
|
|||
|
||||
SECTION("strict mode")
|
||||
{
|
||||
CHECK_THROWS_AS(json::from_cbor(vec), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_cbor(vec),
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(vec), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(vec),
|
||||
"[json.exception.parse_error.110] parse error at byte 2: syntax error while parsing CBOR value: expected end of input; last byte: 0xF6");
|
||||
CHECK(json::from_cbor(vec, true, false).is_discarded());
|
||||
}
|
||||
|
@@ -1586,7 +1601,8 @@ TEST_CASE("single CBOR roundtrip")
}
}
TEST_CASE("CBOR regressions", "[!throws]")
#if not defined(JSON_NOEXCEPTION)
TEST_CASE("CBOR regressions")
{
SECTION("fuzz test results")
{
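Where Catch marked exception-dependent test cases with the "[!throws]" tag, the suite now wraps them in a preprocessor guard so they disappear entirely when the library is built without exceptions. A self-contained sketch of the pattern (doctest and nlohmann/json assumed to be on the include path):

    #define DOCTEST_CONFIG_IMPLEMENT_WITH_MAIN
    #include "doctest.h"
    #include <nlohmann/json.hpp>
    using nlohmann::json;

    #if not defined(JSON_NOEXCEPTION)
    TEST_CASE("only compiled when exceptions are enabled")
    {
        json _;
        CHECK_THROWS_AS(_ = json::parse("["), json::parse_error&);
    }
    #endif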
@@ -1655,12 +1671,13 @@ TEST_CASE("CBOR regressions", "[!throws]")
}
}
}
#endif
TEST_CASE("CBOR roundtrips", "[hide]")
TEST_CASE("CBOR roundtrips" * doctest::skip())
{
SECTION("input from flynn")
{
// most of these are exluded due to differences in key order (not a real problem)
// most of these are excluded due to differences in key order (not a real problem)
auto exclude_packed = std::set<std::string>
{
"test/data/json.org/1.json",
@ -1827,8 +1844,8 @@ TEST_CASE("CBOR roundtrips", "[hide]")
|
|||
{
|
||||
CAPTURE(filename)
|
||||
|
||||
SECTION(filename + ": std::vector<uint8_t>")
|
||||
{
|
||||
INFO_WITH_TEMP(filename + ": std::vector<uint8_t>");
|
||||
// parse JSON file
|
||||
std::ifstream f_json(filename);
|
||||
json j1 = json::parse(f_json);
|
||||
|
@ -1845,8 +1862,8 @@ TEST_CASE("CBOR roundtrips", "[hide]")
|
|||
CHECK(j1 == j2);
|
||||
}
|
||||
|
||||
SECTION(filename + ": std::ifstream")
|
||||
{
|
||||
INFO_WITH_TEMP(filename + ": std::ifstream");
|
||||
// parse JSON file
|
||||
std::ifstream f_json(filename);
|
||||
json j1 = json::parse(f_json);
|
||||
|
@ -1860,8 +1877,8 @@ TEST_CASE("CBOR roundtrips", "[hide]")
|
|||
CHECK(j1 == j2);
|
||||
}
|
||||
|
||||
SECTION(filename + ": uint8_t* and size")
|
||||
{
|
||||
INFO_WITH_TEMP(filename + ": uint8_t* and size");
|
||||
// parse JSON file
|
||||
std::ifstream f_json(filename);
|
||||
json j1 = json::parse(f_json);
|
||||
|
@ -1878,8 +1895,8 @@ TEST_CASE("CBOR roundtrips", "[hide]")
|
|||
CHECK(j1 == j2);
|
||||
}
|
||||
|
||||
SECTION(filename + ": output to output adapters")
|
||||
{
|
||||
INFO_WITH_TEMP(filename + ": output to output adapters");
|
||||
// parse JSON file
|
||||
std::ifstream f_json(filename);
|
||||
json j1 = json::parse(f_json);
|
||||
|
@ -1892,8 +1909,8 @@ TEST_CASE("CBOR roundtrips", "[hide]")
|
|||
|
||||
if (!exclude_packed.count(filename))
|
||||
{
|
||||
SECTION(filename + ": output adapters: std::vector<uint8_t>")
|
||||
{
|
||||
INFO_WITH_TEMP(filename + ": output adapters: std::vector<uint8_t>");
|
||||
std::vector<uint8_t> vec;
|
||||
json::to_cbor(j1, vec);
|
||||
CHECK(vec == packed);
|
||||
|
@ -1904,7 +1921,8 @@ TEST_CASE("CBOR roundtrips", "[hide]")
|
|||
}
|
||||
}
|
||||
|
||||
TEST_CASE("all CBOR first bytes", "[!throws]")
|
||||
#if not defined(JSON_NOEXCEPTION)
|
||||
TEST_CASE("all CBOR first bytes")
|
||||
{
|
||||
// these bytes will fail immediately with exception parse_error.112
|
||||
std::set<uint8_t> unsupported =
|
||||
|
@ -1968,7 +1986,7 @@ TEST_CASE("all CBOR first bytes", "[!throws]")
|
|||
{
|
||||
// check that parse_error.112 is only thrown if the
|
||||
// first byte is in the unsupported set
|
||||
CAPTURE(e.what())
|
||||
INFO_WITH_TEMP(e.what());
|
||||
if (std::find(unsupported.begin(), unsupported.end(), byte) != unsupported.end())
|
||||
{
|
||||
CHECK(e.id == 112);
|
||||
|
@ -1980,6 +1998,7 @@ TEST_CASE("all CBOR first bytes", "[!throws]")
|
|||
}
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
TEST_CASE("examples from RFC 7049 Appendix A")
|
||||
{
@ -1,7 +1,7 @@
|
|||
/*
|
||||
__ _____ _____ _____
|
||||
__| | __| | | | JSON for Modern C++ (test suite)
|
||||
| | |__ | | | | | | version 3.6.1
|
||||
| | |__ | | | | | | version 3.7.0
|
||||
|_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
|
||||
Licensed under the MIT License <http://opensource.org/licenses/MIT>.
|
||||
|
@ -27,11 +27,12 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|||
SOFTWARE.
|
||||
*/
|
||||
|
||||
#include "catch.hpp"
|
||||
#include "doctest_compatibility.h"
|
||||
|
||||
#define private public
|
||||
#include <nlohmann/json.hpp>
|
||||
using nlohmann::json;
|
||||
#undef private
|
||||
|
||||
TEST_CASE("const_iterator class")
|
||||
{
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/*
|
||||
__ _____ _____ _____
|
||||
__| | __| | | | JSON for Modern C++ (test suite)
|
||||
| | |__ | | | | | | version 3.6.1
|
||||
| | |__ | | | | | | version 3.7.0
|
||||
|_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
|
||||
Licensed under the MIT License <http://opensource.org/licenses/MIT>.
|
||||
|
@ -27,11 +27,12 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|||
SOFTWARE.
|
||||
*/
|
||||
|
||||
#include "catch.hpp"
|
||||
#include "doctest_compatibility.h"
|
||||
|
||||
#define private public
|
||||
#include <nlohmann/json.hpp>
|
||||
using nlohmann::json;
|
||||
#undef private
|
||||
|
||||
TEST_CASE("iterator class")
|
||||
{
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/*
|
||||
__ _____ _____ _____
|
||||
__| | __| | | | JSON for Modern C++ (test suite)
|
||||
| | |__ | | | | | | version 3.6.1
|
||||
| | |__ | | | | | | version 3.7.0
|
||||
|_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
|
||||
Licensed under the MIT License <http://opensource.org/licenses/MIT>.
|
||||
|
@ -27,18 +27,22 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|||
SOFTWARE.
|
||||
*/
|
||||
|
||||
#include "catch.hpp"
|
||||
#include "doctest_compatibility.h"
|
||||
|
||||
#define private public
|
||||
#include <nlohmann/json.hpp>
|
||||
using nlohmann::json;
|
||||
#undef private
|
||||
|
||||
namespace
|
||||
{
|
||||
// shortcut to scan a string literal
|
||||
json::lexer::token_type scan_string(const char* s);
|
||||
json::lexer::token_type scan_string(const char* s)
|
||||
{
|
||||
return json::lexer(nlohmann::detail::input_adapter(s)).scan();
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE("lexer class")
|
||||
{
@ -1,7 +1,7 @@
|
|||
/*
|
||||
__ _____ _____ _____
|
||||
__| | __| | | | JSON for Modern C++ (test suite)
|
||||
| | |__ | | | | | | version 3.6.1
|
||||
| | |__ | | | | | | version 3.7.0
|
||||
|_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
|
||||
Licensed under the MIT License <http://opensource.org/licenses/MIT>.
|
||||
|
@ -27,14 +27,17 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|||
SOFTWARE.
|
||||
*/
|
||||
|
||||
#include "catch.hpp"
|
||||
#include "doctest_compatibility.h"
|
||||
|
||||
#define private public
|
||||
#include <nlohmann/json.hpp>
|
||||
using nlohmann::json;
|
||||
#undef private
|
||||
|
||||
#include <valarray>
|
||||
|
||||
namespace
|
||||
{
|
||||
class SaxEventLogger
|
||||
{
|
||||
public:
|
||||
|
@ -255,6 +258,7 @@ bool accept_helper(const std::string& s)
|
|||
// 7. return result
|
||||
return ok_accept;
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE("parser class")
|
||||
{
|
||||
|
@ -402,8 +406,9 @@ TEST_CASE("parser class")
|
|||
// uses an iterator range.
|
||||
std::string s = "\"1\"";
|
||||
s[1] = '\0';
|
||||
CHECK_THROWS_AS(json::parse(s.begin(), s.end()), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::parse(s.begin(), s.end()), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0000 (NUL) must be escaped to \\u0000; last read: '\"<U+0000>'");
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::parse(s.begin(), s.end()), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::parse(s.begin(), s.end()), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0000 (NUL) must be escaped to \\u0000; last read: '\"<U+0000>'");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1105,8 +1110,8 @@ TEST_CASE("parser class")
|
|||
// only check error message if c is not a control character
|
||||
if (c > 0x1f)
|
||||
{
|
||||
CHECK_THROWS_WITH(parser_helper(s.c_str()),
|
||||
"[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid string: forbidden character after backslash; last read: '\"\\" + std::string(1, static_cast<char>(c)) + "'");
|
||||
CHECK_THROWS_WITH_STD_STR(parser_helper(s.c_str()),
|
||||
"[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid string: forbidden character after backslash; last read: '\"\\" + std::string(1, static_cast<char>(c)) + "'");
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
@ -1181,8 +1186,8 @@ TEST_CASE("parser class")
|
|||
// only check error message if c is not a control character
|
||||
if (c > 0x1f)
|
||||
{
|
||||
CHECK_THROWS_WITH(parser_helper(s1.c_str()),
|
||||
"[json.exception.parse_error.101] parse error at line 1, column 7: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; last read: '" + s1.substr(0, 7) + "'");
|
||||
CHECK_THROWS_WITH_STD_STR(parser_helper(s1.c_str()),
|
||||
"[json.exception.parse_error.101] parse error at line 1, column 7: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; last read: '" + s1.substr(0, 7) + "'");
|
||||
}
|
||||
|
||||
CAPTURE(s2)
|
||||
|
@ -1190,8 +1195,8 @@ TEST_CASE("parser class")
|
|||
// only check error message if c is not a control character
|
||||
if (c > 0x1f)
|
||||
{
|
||||
CHECK_THROWS_WITH(parser_helper(s2.c_str()),
|
||||
"[json.exception.parse_error.101] parse error at line 1, column 6: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; last read: '" + s2.substr(0, 6) + "'");
|
||||
CHECK_THROWS_WITH_STD_STR(parser_helper(s2.c_str()),
|
||||
"[json.exception.parse_error.101] parse error at line 1, column 6: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; last read: '" + s2.substr(0, 6) + "'");
|
||||
}
|
||||
|
||||
CAPTURE(s3)
|
||||
|
@ -1199,8 +1204,8 @@ TEST_CASE("parser class")
|
|||
// only check error message if c is not a control character
|
||||
if (c > 0x1f)
|
||||
{
|
||||
CHECK_THROWS_WITH(parser_helper(s3.c_str()),
|
||||
"[json.exception.parse_error.101] parse error at line 1, column 5: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; last read: '" + s3.substr(0, 5) + "'");
|
||||
CHECK_THROWS_WITH_STD_STR(parser_helper(s3.c_str()),
|
||||
"[json.exception.parse_error.101] parse error at line 1, column 5: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; last read: '" + s3.substr(0, 5) + "'");
|
||||
}
|
||||
|
||||
CAPTURE(s4)
|
||||
|
@ -1208,26 +1213,28 @@ TEST_CASE("parser class")
|
|||
// only check error message if c is not a control character
|
||||
if (c > 0x1f)
|
||||
{
|
||||
CHECK_THROWS_WITH(parser_helper(s4.c_str()),
|
||||
"[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; last read: '" + s4.substr(0, 4) + "'");
|
||||
CHECK_THROWS_WITH_STD_STR(parser_helper(s4.c_str()),
|
||||
"[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; last read: '" + s4.substr(0, 4) + "'");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
json _;
|
||||
|
||||
// missing part of a surrogate pair
|
||||
CHECK_THROWS_AS(json::parse("\"\\uD80C\""), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::parse("\"\\uD80C\""),
|
||||
CHECK_THROWS_AS(_ = json::parse("\"\\uD80C\""), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::parse("\"\\uD80C\""),
|
||||
"[json.exception.parse_error.101] parse error at line 1, column 8: syntax error while parsing value - invalid string: surrogate U+DC00..U+DFFF must be followed by U+DC00..U+DFFF; last read: '\"\\uD80C\"'");
|
||||
// invalid surrogate pair
|
||||
CHECK_THROWS_AS(json::parse("\"\\uD80C\\uD80C\""), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::parse("\"\\uD80C\\u0000\""), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::parse("\"\\uD80C\\uFFFF\""), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::parse("\"\\uD80C\\uD80C\""),
|
||||
CHECK_THROWS_AS(_ = json::parse("\"\\uD80C\\uD80C\""), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::parse("\"\\uD80C\\u0000\""), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::parse("\"\\uD80C\\uFFFF\""), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::parse("\"\\uD80C\\uD80C\""),
|
||||
"[json.exception.parse_error.101] parse error at line 1, column 13: syntax error while parsing value - invalid string: surrogate U+DC00..U+DFFF must be followed by U+DC00..U+DFFF; last read: '\"\\uD80C\\uD80C'");
|
||||
CHECK_THROWS_WITH(json::parse("\"\\uD80C\\u0000\""),
|
||||
CHECK_THROWS_WITH(_ = json::parse("\"\\uD80C\\u0000\""),
|
||||
"[json.exception.parse_error.101] parse error at line 1, column 13: syntax error while parsing value - invalid string: surrogate U+DC00..U+DFFF must be followed by U+DC00..U+DFFF; last read: '\"\\uD80C\\u0000'");
|
||||
CHECK_THROWS_WITH(json::parse("\"\\uD80C\\uFFFF\""),
|
||||
CHECK_THROWS_WITH(_ = json::parse("\"\\uD80C\\uFFFF\""),
|
||||
"[json.exception.parse_error.101] parse error at line 1, column 13: syntax error while parsing value - invalid string: surrogate U+DC00..U+DFFF must be followed by U+DC00..U+DFFF; last read: '\"\\uD80C\\uFFFF'");
|
||||
}
|
||||
|
||||
|
@ -1675,12 +1682,13 @@ TEST_CASE("parser class")
|
|||
|
||||
CHECK(json::parse("{\"foo\": true:", cb, false).is_discarded());
|
||||
|
||||
CHECK_THROWS_AS(json::parse("{\"foo\": true:", cb), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::parse("{\"foo\": true:", cb),
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::parse("{\"foo\": true:", cb), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::parse("{\"foo\": true:", cb),
|
||||
"[json.exception.parse_error.101] parse error at line 1, column 13: syntax error while parsing object - unexpected ':'; expected '}'");
|
||||
|
||||
CHECK_THROWS_AS(json::parse("1.18973e+4932", cb), json::out_of_range&);
|
||||
CHECK_THROWS_WITH(json::parse("1.18973e+4932", cb),
|
||||
CHECK_THROWS_AS(_ = json::parse("1.18973e+4932", cb), json::out_of_range&);
|
||||
CHECK_THROWS_WITH(_ = json::parse("1.18973e+4932", cb),
|
||||
"[json.exception.out_of_range.406] number overflow parsing '1.18973e+4932'");
|
||||
}
|
||||
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/*
|
||||
__ _____ _____ _____
|
||||
__| | __| | | | JSON for Modern C++ (test suite)
|
||||
| | |__ | | | | | | version 3.6.1
|
||||
| | |__ | | | | | | version 3.7.0
|
||||
|_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
|
||||
Licensed under the MIT License <http://opensource.org/licenses/MIT>.
|
||||
|
@ -27,11 +27,13 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|||
SOFTWARE.
|
||||
*/
|
||||
|
||||
#include "catch.hpp"
|
||||
#include "doctest_compatibility.h"
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
using nlohmann::json;
|
||||
|
||||
namespace
|
||||
{
|
||||
// helper function to check std::less<json::value_t>
|
||||
// see https://en.cppreference.com/w/cpp/utility/functional/less
|
||||
template <typename A, typename B, typename U = std::less<json::value_t>>
|
||||
|
@ -39,6 +41,7 @@ bool f(A a, B b, U u = U())
|
|||
{
|
||||
return u(a, b);
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE("lexicographical comparison operators")
|
||||
{
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/*
|
||||
__ _____ _____ _____
|
||||
__| | __| | | | JSON for Modern C++ (test suite)
|
||||
| | |__ | | | | | | version 3.6.1
|
||||
| | |__ | | | | | | version 3.7.0
|
||||
|_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
|
||||
Licensed under the MIT License <http://opensource.org/licenses/MIT>.
|
||||
|
@ -27,7 +27,7 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|||
SOFTWARE.
|
||||
*/
|
||||
|
||||
#include "catch.hpp"
|
||||
#include "doctest_compatibility.h"
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
using nlohmann::json;
|
||||
|
@@ -73,8 +73,8 @@ TEST_CASE("concepts")
// X::size_type must return an unsigned integer
CHECK((std::is_unsigned<json::size_type>::value));
// X::size_type can represent any non-negative value of X::difference_type
CHECK(static_cast<json::size_type>(std::numeric_limits<json::difference_type>::max()) <=
std::numeric_limits<json::size_type>::max());
CHECK(static_cast<json::size_type>((std::numeric_limits<json::difference_type>::max)()) <=
(std::numeric_limits<json::size_type>::max)());
// the expression "X u" has the post-condition "u.empty()"
{
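The added parentheses around ::max are the usual defence against the function-like min/max macros that <windows.h> defines unless NOMINMAX is set: writing (std::numeric_limits<T>::max)() keeps the name from being macro-expanded while still calling the function. For example:

    #include <cstdio>
    #include <limits>

    int main()
    {
        // compiles even if a max() macro is in scope, because the parenthesised
        // name is not immediately followed by '(' and so is not macro-expanded
        const long long hi = (std::numeric_limits<long long>::max)();
        std::printf("%lld\n", hi);
        return 0;
    }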
@ -1,7 +1,7 @@
|
|||
/*
|
||||
__ _____ _____ _____
|
||||
__| | __| | | | JSON for Modern C++ (test suite)
|
||||
| | |__ | | | | | | version 3.6.1
|
||||
| | |__ | | | | | | version 3.7.0
|
||||
|_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
|
||||
Licensed under the MIT License <http://opensource.org/licenses/MIT>.
|
||||
|
@ -27,16 +27,19 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|||
SOFTWARE.
|
||||
*/
|
||||
|
||||
#include "catch.hpp"
|
||||
#include "doctest_compatibility.h"
|
||||
DOCTEST_GCC_SUPPRESS_WARNING("-Wfloat-equal")
|
||||
|
||||
#define private public
|
||||
#include <nlohmann/json.hpp>
|
||||
using nlohmann::json;
|
||||
#undef private
|
||||
|
||||
#include <deque>
|
||||
#include <forward_list>
|
||||
#include <fstream>
|
||||
#include <list>
|
||||
#include <set>
|
||||
#include <unordered_map>
|
||||
#include <unordered_set>
|
||||
#include <valarray>
|
||||
|
@@ -1049,9 +1052,10 @@ TEST_CASE("constructors")
SECTION("object with error")
{
CHECK_THROWS_AS(json::object({ {"one", 1}, {"two", 1u}, {"three", 2.2}, {"four", false}, 13 }),
json _;
CHECK_THROWS_AS(_ = json::object({ {"one", 1}, {"two", 1u}, {"three", 2.2}, {"four", false}, 13 }),
json::type_error&);
CHECK_THROWS_WITH(json::object({ {"one", 1}, {"two", 1u}, {"three", 2.2}, {"four", false}, 13 }),
CHECK_THROWS_WITH(_ = json::object({ {"one", 1}, {"two", 1u}, {"three", 2.2}, {"four", false}, 13 }),
"[json.exception.type_error.301] cannot create object from initializer list");
}
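json::object() accepts an initializer list made only of key/value pairs; the stray 13 in the list above is what triggers type_error.301. A minimal illustration of both sides, using only calls shown in this diff:

    #include <nlohmann/json.hpp>
    using nlohmann::json;

    int main()
    {
        // well-formed: every element is a {key, value} pair
        const json ok = json::object({ {"one", 1}, {"two", 2.2}, {"three", false} });

        // ill-formed: 13 is not a pair, so this throws json::type_error (error 301)
        json bad;
        try
        {
            bad = json::object({ {"one", 1}, 13 });
        }
        catch (const json::type_error&)
        {
            // "[json.exception.type_error.301] cannot create object from initializer list"
        }
        return (ok.is_object() && bad.is_null()) ? 0 : 1;
    }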
@ -1,7 +1,7 @@
|
|||
/*
|
||||
__ _____ _____ _____
|
||||
__| | __| | | | JSON for Modern C++ (test suite)
|
||||
| | |__ | | | | | | version 3.6.1
|
||||
| | |__ | | | | | | version 3.7.0
|
||||
|_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
|
||||
Licensed under the MIT License <http://opensource.org/licenses/MIT>.
|
||||
|
@ -27,7 +27,7 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|||
SOFTWARE.
|
||||
*/
|
||||
|
||||
#include "catch.hpp"
|
||||
#include "doctest_compatibility.h"
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
using nlohmann::json;
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/*
|
||||
__ _____ _____ _____
|
||||
__| | __| | | | JSON for Modern C++ (test suite)
|
||||
| | |__ | | | | | | version 3.6.1
|
||||
| | |__ | | | | | | version 3.7.0
|
||||
|_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
|
||||
Licensed under the MIT License <http://opensource.org/licenses/MIT>.
|
||||
|
@ -27,12 +27,17 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|||
SOFTWARE.
|
||||
*/
|
||||
|
||||
#include "catch.hpp"
|
||||
#include "doctest_compatibility.h"
|
||||
|
||||
#define private public
|
||||
#include <nlohmann/json.hpp>
|
||||
using nlohmann::json;
|
||||
#undef private
|
||||
|
||||
#include <sstream>
|
||||
|
||||
namespace
|
||||
{
|
||||
void check_escaped(const char* original, const char* escaped = "", const bool ensure_ascii = false);
|
||||
void check_escaped(const char* original, const char* escaped, const bool ensure_ascii)
|
||||
{
|
||||
|
@ -41,6 +46,7 @@ void check_escaped(const char* original, const char* escaped, const bool ensure_
|
|||
s.dump_escaped(original, ensure_ascii);
|
||||
CHECK(ss.str() == escaped);
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE("convenience functions")
|
||||
{
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/*
|
||||
__ _____ _____ _____
|
||||
__| | __| | | | JSON for Modern C++ (test suite)
|
||||
| | |__ | | | | | | version 3.6.1
|
||||
| | |__ | | | | | | version 3.7.0
|
||||
|_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
|
||||
Licensed under the MIT License <http://opensource.org/licenses/MIT>.
|
||||
|
@ -27,15 +27,17 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|||
SOFTWARE.
|
||||
*/
|
||||
|
||||
#include "catch.hpp"
|
||||
#include "doctest_compatibility.h"
|
||||
|
||||
#define private public
|
||||
#include <nlohmann/json.hpp>
|
||||
using nlohmann::json;
|
||||
#undef private
|
||||
|
||||
#include <deque>
|
||||
#include <forward_list>
|
||||
#include <list>
|
||||
#include <set>
|
||||
#include <unordered_map>
|
||||
#include <unordered_set>
|
||||
#include <valarray>
|
||||
|
@ -140,6 +142,54 @@ TEST_CASE("value conversion")
|
|||
}
|
||||
}
|
||||
|
||||
SECTION("get an object (explicit, get_to)")
|
||||
{
|
||||
json::object_t o_reference = {{"object", json::object()},
|
||||
{"array", {1, 2, 3, 4}},
|
||||
{"number", 42},
|
||||
{"boolean", false},
|
||||
{"null", nullptr},
|
||||
{"string", "Hello world"}
|
||||
};
|
||||
json j(o_reference);
|
||||
|
||||
SECTION("json::object_t")
|
||||
{
|
||||
json::object_t o = {{"previous", "value"}};
|
||||
j.get_to(o);
|
||||
CHECK(json(o) == j);
|
||||
}
|
||||
|
||||
SECTION("std::map<json::string_t, json>")
|
||||
{
|
||||
std::map<json::string_t, json> o{{"previous", "value"}};
|
||||
j.get_to(o);
|
||||
CHECK(json(o) == j);
|
||||
}
|
||||
|
||||
SECTION("std::multimap<json::string_t, json>")
|
||||
{
|
||||
std::multimap<json::string_t, json> o{{"previous", "value"}};
|
||||
j.get_to(o);
|
||||
CHECK(json(o) == j);
|
||||
}
|
||||
|
||||
SECTION("std::unordered_map<json::string_t, json>")
|
||||
{
|
||||
std::unordered_map<json::string_t, json> o{{"previous", "value"}};
|
||||
j.get_to(o);
|
||||
CHECK(json(o) == j);
|
||||
}
|
||||
|
||||
SECTION("std::unordered_multimap<json::string_t, json>")
|
||||
{
|
||||
std::unordered_multimap<json::string_t, json> o{{"previous", "value"}};
|
||||
j.get_to(o);
|
||||
CHECK(json(o) == j);
|
||||
}
|
||||
}
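These new subcases exercise get_to, which fills an already-constructed container in place (and returns a reference to it) rather than building a fresh one; note the pre-populated "previous" entries above, which must be gone afterwards. A short usage sketch relying on the same behaviour:

    #include <cassert>
    #include <map>
    #include <string>
    #include <nlohmann/json.hpp>
    using nlohmann::json;

    int main()
    {
        const json j = {{"number", 42}, {"boolean", false}};

        std::map<std::string, json> m{{"previous", "value"}};
        j.get_to(m);                 // m now mirrors j; the stale entry is replaced

        assert(m.size() == 2);
        assert(m.at("number") == 42);
        assert(json(m) == j);
        return 0;
    }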
|
||||
|
||||
|
||||
SECTION("get an object (implicit)")
|
||||
{
|
||||
json::object_t o_reference = {{"object", json::object()},
|
||||
|
@ -226,11 +276,6 @@ TEST_CASE("value conversion")
|
|||
#if not defined(JSON_NOEXCEPTION)
|
||||
SECTION("reserve is called on containers that supports it")
|
||||
{
|
||||
// making the call to from_json throw in order to check capacity
|
||||
std::vector<float> v;
|
||||
CHECK_THROWS_AS(nlohmann::from_json(j, v), json::type_error&);
|
||||
CHECK(v.capacity() == j.size());
|
||||
|
||||
// make sure all values are properly copied
|
||||
std::vector<int> v2 = json({1, 2, 3, 4, 5, 6, 7, 8, 9, 10});
|
||||
CHECK(v2.size() == 10);
|
||||
|
@ -302,6 +347,65 @@ TEST_CASE("value conversion")
|
|||
}
|
||||
}
|
||||
|
||||
SECTION("get an array (explicit, get_to)")
|
||||
{
|
||||
json::array_t a_reference{json(1), json(1u), json(2.2),
|
||||
json(false), json("string"), json()};
|
||||
json j(a_reference);
|
||||
|
||||
SECTION("json::array_t")
|
||||
{
|
||||
json::array_t a{"previous", "value"};
|
||||
j.get_to(a);
|
||||
CHECK(json(a) == j);
|
||||
}
|
||||
|
||||
SECTION("std::valarray<json>")
|
||||
{
|
||||
std::valarray<json> a{"previous", "value"};
|
||||
j.get_to(a);
|
||||
CHECK(json(a) == j);
|
||||
}
|
||||
|
||||
SECTION("std::list<json>")
|
||||
{
|
||||
std::list<json> a{"previous", "value"};
|
||||
j.get_to(a);
|
||||
CHECK(json(a) == j);
|
||||
}
|
||||
|
||||
SECTION("std::forward_list<json>")
|
||||
{
|
||||
std::forward_list<json> a{"previous", "value"};
|
||||
j.get_to(a);
|
||||
CHECK(json(a) == j);
|
||||
}
|
||||
|
||||
SECTION("std::vector<json>")
|
||||
{
|
||||
std::vector<json> a{"previous", "value"};
|
||||
j.get_to(a);
|
||||
CHECK(json(a) == j);
|
||||
}
|
||||
|
||||
SECTION("built-in arrays")
|
||||
{
|
||||
const int nbs[] = {0, 1, 2};
|
||||
int nbs2[] = {0, 0, 0};
|
||||
|
||||
json j2 = nbs;
|
||||
j2.get_to(nbs2);
|
||||
CHECK(std::equal(std::begin(nbs), std::end(nbs), std::begin(nbs2)));
|
||||
}
|
||||
|
||||
SECTION("std::deque<json>")
|
||||
{
|
||||
std::deque<json> a{"previous", "value"};
|
||||
j.get_to(a);
|
||||
CHECK(json(a) == j);
|
||||
}
|
||||
}
|
||||
|
||||
SECTION("get an array (implicit)")
|
||||
{
|
||||
json::array_t a_reference{json(1), json(1u), json(2.2),
|
||||
|
@ -433,6 +537,35 @@ TEST_CASE("value conversion")
|
|||
#endif
|
||||
}
|
||||
|
||||
SECTION("get a string (explicit, get_to)")
|
||||
{
|
||||
json::string_t s_reference{"Hello world"};
|
||||
json j(s_reference);
|
||||
|
||||
SECTION("string_t")
|
||||
{
|
||||
json::string_t s = "previous value";
|
||||
j.get_to(s);
|
||||
CHECK(json(s) == j);
|
||||
}
|
||||
|
||||
SECTION("std::string")
|
||||
{
|
||||
std::string s = "previous value";
|
||||
j.get_to(s);
|
||||
CHECK(json(s) == j);
|
||||
}
|
||||
#if defined(JSON_HAS_CPP_17)
|
||||
SECTION("std::string_view")
|
||||
{
|
||||
std::string s = "previous value";
|
||||
std::string_view sv = s;
|
||||
j.get_to(sv);
|
||||
CHECK(json(sv) == j);
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
SECTION("get null (explicit)")
|
||||
{
|
||||
std::nullptr_t n = nullptr;
|
||||
|
@ -480,7 +613,7 @@ TEST_CASE("value conversion")
|
|||
#if defined(JSON_HAS_CPP_17)
|
||||
SECTION("std::string_view")
|
||||
{
|
||||
std::string_view s = j;
|
||||
std::string_view s = j.get<std::string_view>();
|
||||
CHECK(json(s) == j);
|
||||
}
|
||||
#endif
|
||||
|
@ -503,13 +636,19 @@ TEST_CASE("value conversion")
|
|||
CHECK(json(b) == j);
|
||||
}
|
||||
|
||||
SECTION("uint8_t")
|
||||
{
|
||||
auto n = j.get<uint8_t>();
|
||||
CHECK(n == 1);
|
||||
}
|
||||
|
||||
SECTION("bool")
|
||||
{
|
||||
bool b = j.get<bool>();
|
||||
CHECK(json(b) == j);
|
||||
}
|
||||
|
||||
SECTION("exception in case of a non-string type")
|
||||
SECTION("exception in case of a non-number type")
|
||||
{
|
||||
CHECK_THROWS_AS(json(json::value_t::null).get<json::boolean_t>(),
|
||||
json::type_error&);
|
||||
|
@ -519,6 +658,8 @@ TEST_CASE("value conversion")
|
|||
json::type_error&);
|
||||
CHECK_THROWS_AS(json(json::value_t::string).get<json::boolean_t>(),
|
||||
json::type_error&);
|
||||
CHECK_THROWS_AS(json(json::value_t::string).get<uint8_t>(),
|
||||
json::type_error&);
|
||||
CHECK_THROWS_AS(
|
||||
json(json::value_t::number_integer).get<json::boolean_t>(),
|
||||
json::type_error&);
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/*
|
||||
__ _____ _____ _____
|
||||
__| | __| | | | JSON for Modern C++ (test suite)
|
||||
| | |__ | | | | | | version 3.6.1
|
||||
| | |__ | | | | | | version 3.7.0
|
||||
|_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
|
||||
Licensed under the MIT License <http://opensource.org/licenses/MIT>.
|
||||
|
@ -27,14 +27,17 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|||
SOFTWARE.
|
||||
*/
|
||||
|
||||
#include "catch.hpp"
|
||||
#include "doctest_compatibility.h"
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
using nlohmann::json;
|
||||
|
||||
#include <iostream>
|
||||
#include <sstream>
|
||||
#include <valarray>
|
||||
|
||||
namespace
|
||||
{
|
||||
struct SaxEventLogger : public nlohmann::json_sax<json>
|
||||
{
|
||||
bool null() override
|
||||
|
@ -166,6 +169,7 @@ struct SaxEventLoggerExitAfterStartArray : public SaxEventLogger
|
|||
return false;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
TEST_CASE("deserialization")
|
||||
{
|
||||
|
@ -265,8 +269,10 @@ TEST_CASE("deserialization")
|
|||
ss3 << "[\"foo\",1,2,3,false,{\"one\":1}";
|
||||
ss4 << "[\"foo\",1,2,3,false,{\"one\":1}";
|
||||
ss5 << "[\"foo\",1,2,3,false,{\"one\":1}";
|
||||
CHECK_THROWS_AS(json::parse(ss1), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::parse(ss2),
|
||||
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::parse(ss1), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::parse(ss2),
|
||||
"[json.exception.parse_error.101] parse error at line 1, column 29: syntax error while parsing array - unexpected end of input; expected ']'");
|
||||
CHECK(not json::accept(ss3));
|
||||
|
||||
|
@ -289,8 +295,9 @@ TEST_CASE("deserialization")
|
|||
SECTION("string")
|
||||
{
|
||||
json::string_t s = "[\"foo\",1,2,3,false,{\"one\":1}";
|
||||
CHECK_THROWS_AS(json::parse(s), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::parse(s),
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::parse(s), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::parse(s),
|
||||
"[json.exception.parse_error.101] parse error at line 1, column 29: syntax error while parsing array - unexpected end of input; expected ']'");
|
||||
CHECK(not json::accept(s));
|
||||
|
||||
|
@ -426,7 +433,8 @@ TEST_CASE("deserialization")
|
|||
SECTION("empty container")
|
||||
{
|
||||
std::vector<uint8_t> v;
|
||||
CHECK_THROWS_AS(json::parse(v), json::parse_error&);
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::parse(v), json::parse_error&);
|
||||
CHECK(not json::accept(v));
|
||||
|
||||
SaxEventLogger l;
|
||||
|
@ -610,8 +618,9 @@ TEST_CASE("deserialization")
|
|||
SECTION("case 6")
|
||||
{
|
||||
uint8_t v[] = {'\"', 0x7F, 0xDF, 0x7F};
|
||||
CHECK_THROWS_AS(json::parse(std::begin(v), std::end(v)), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::parse(std::begin(v), std::end(v)),
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::parse(std::begin(v), std::end(v)), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::parse(std::begin(v), std::end(v)),
|
||||
"[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid string: ill-formed UTF-8 byte; last read: '\"\x7f\xdf\x7f'");
|
||||
CHECK(not json::accept(std::begin(v), std::end(v)));
|
||||
|
||||
|
@ -797,12 +806,13 @@ TEST_CASE("deserialization")
|
|||
|
||||
SECTION("BOM only")
|
||||
{
|
||||
CHECK_THROWS_AS(json::parse(bom), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::parse(bom),
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::parse(bom), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::parse(bom),
|
||||
"[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - unexpected end of input; expected '[', '{', or a literal");
|
||||
|
||||
CHECK_THROWS_AS(json::parse(std::istringstream(bom)), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::parse(std::istringstream(bom)),
|
||||
CHECK_THROWS_AS(_ = json::parse(std::istringstream(bom)), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::parse(std::istringstream(bom)),
|
||||
"[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - unexpected end of input; expected '[', '{', or a literal");
|
||||
|
||||
SaxEventLogger l;
|
||||
|
@ -836,12 +846,13 @@ TEST_CASE("deserialization")
|
|||
|
||||
SECTION("2 byte of BOM")
|
||||
{
|
||||
CHECK_THROWS_AS(json::parse(bom.substr(0, 2)), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::parse(bom.substr(0, 2)),
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::parse(bom.substr(0, 2)), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::parse(bom.substr(0, 2)),
|
||||
"[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid BOM; must be 0xEF 0xBB 0xBF if given; last read: '\xEF\xBB'");
|
||||
|
||||
CHECK_THROWS_AS(json::parse(std::istringstream(bom.substr(0, 2))), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::parse(std::istringstream(bom.substr(0, 2))),
|
||||
CHECK_THROWS_AS(_ = json::parse(std::istringstream(bom.substr(0, 2))), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::parse(std::istringstream(bom.substr(0, 2))),
|
||||
"[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid BOM; must be 0xEF 0xBB 0xBF if given; last read: '\xEF\xBB'");
|
||||
|
||||
SaxEventLogger l1, l2;
|
||||
|
@ -861,12 +872,13 @@ TEST_CASE("deserialization")
|
|||
|
||||
SECTION("1 byte of BOM")
|
||||
{
|
||||
CHECK_THROWS_AS(json::parse(bom.substr(0, 1)), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::parse(bom.substr(0, 1)),
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::parse(bom.substr(0, 1)), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::parse(bom.substr(0, 1)),
|
||||
"[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid BOM; must be 0xEF 0xBB 0xBF if given; last read: '\xEF'");
|
||||
|
||||
CHECK_THROWS_AS(json::parse(std::istringstream(bom.substr(0, 1))), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::parse(std::istringstream(bom.substr(0, 1))),
|
||||
CHECK_THROWS_AS(_ = json::parse(std::istringstream(bom.substr(0, 1))), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::parse(std::istringstream(bom.substr(0, 1))),
|
||||
"[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid BOM; must be 0xEF 0xBB 0xBF if given; last read: '\xEF'");
|
||||
|
||||
SaxEventLogger l1, l2;
|
||||
|
@ -921,8 +933,9 @@ TEST_CASE("deserialization")
|
|||
else
|
||||
{
|
||||
// any variation is an error
|
||||
CHECK_THROWS_AS(json::parse(s + "null"), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::parse(std::istringstream(s + "null")), json::parse_error&);
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::parse(s + "null"), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::parse(std::istringstream(s + "null")), json::parse_error&);
|
||||
|
||||
SaxEventLogger l;
|
||||
CHECK(not json::sax_parse(s + "null", &l));
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/*
|
||||
__ _____ _____ _____
|
||||
__| | __| | | | JSON for Modern C++ (test suite)
|
||||
| | |__ | | | | | | version 3.6.1
|
||||
| | |__ | | | | | | version 3.7.0
|
||||
|_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
|
||||
Licensed under the MIT License <http://opensource.org/licenses/MIT>.
|
||||
|
@ -27,7 +27,7 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|||
SOFTWARE.
|
||||
*/
|
||||
|
||||
#include "catch.hpp"
|
||||
#include "doctest_compatibility.h"
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
using nlohmann::json;
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/*
|
||||
__ _____ _____ _____
|
||||
__| | __| | | | JSON for Modern C++ (test suite)
|
||||
| | |__ | | | | | | version 3.6.1
|
||||
| | |__ | | | | | | version 3.7.0
|
||||
|_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
|
||||
Licensed under the MIT License <http://opensource.org/licenses/MIT>.
|
||||
|
@ -27,7 +27,7 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|||
SOFTWARE.
|
||||
*/
|
||||
|
||||
#include "catch.hpp"
|
||||
#include "doctest_compatibility.h"
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
using nlohmann::json;
|
||||
|
@ -1072,7 +1072,8 @@ TEST_CASE("element access 2")
|
|||
}
|
||||
}
|
||||
|
||||
TEST_CASE("element access 2 (throwing tests)", "[!throws]")
|
||||
#if not defined(JSON_NOEXCEPTION)
|
||||
TEST_CASE("element access 2 (throwing tests)")
|
||||
{
|
||||
SECTION("object")
|
||||
{
|
||||
|
@ -1105,3 +1106,4 @@ TEST_CASE("element access 2 (throwing tests)", "[!throws]")
|
|||
}
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/*
|
||||
__ _____ _____ _____
|
||||
__| | __| | | | JSON for Modern C++ (test suite)
|
||||
| | |__ | | | | | | version 3.6.1
|
||||
| | |__ | | | | | | version 3.7.0
|
||||
|_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
|
||||
Licensed under the MIT License <http://opensource.org/licenses/MIT>.
|
||||
|
@ -27,12 +27,14 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|||
SOFTWARE.
|
||||
*/
|
||||
|
||||
#include "catch.hpp"
|
||||
#include "doctest_compatibility.h"
|
||||
|
||||
#include <fstream>
|
||||
#include <nlohmann/json.hpp>
|
||||
using nlohmann::json;
|
||||
|
||||
#include <fstream>
|
||||
#include <sstream>
|
||||
|
||||
TEST_CASE("object inspection")
|
||||
{
|
||||
SECTION("convenience type checker")
|
||||
|
|
|
@@ -1,7 +1,7 @@
/*
__ _____ _____ _____
__| | __| | | | JSON for Modern C++ (test suite)
| | |__ | | | | | | version 3.6.1
| | |__ | | | | | | version 3.7.0
|_____|_____|_____|_|___| https://github.com/nlohmann/json

Licensed under the MIT License <http://opensource.org/licenses/MIT>.

@@ -27,7 +27,7 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/

#include "catch.hpp"
#include "doctest_compatibility.h"

#include <nlohmann/json.hpp>
using nlohmann::json;

@@ -1,7 +1,7 @@
/*
__ _____ _____ _____
__| | __| | | | JSON for Modern C++ (test suite)
| | |__ | | | | | | version 3.6.1
| | |__ | | | | | | version 3.7.0
|_____|_____|_____|_|___| https://github.com/nlohmann/json

Licensed under the MIT License <http://opensource.org/licenses/MIT>.

@@ -27,11 +27,12 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/

#include "catch.hpp"
#include "doctest_compatibility.h"

#define private public
#include <nlohmann/json.hpp>
using nlohmann::json;
#undef private

TEST_CASE("iterators 1")
{

@@ -1,7 +1,7 @@
/*
__ _____ _____ _____
__| | __| | | | JSON for Modern C++ (test suite)
| | |__ | | | | | | version 3.6.1
| | |__ | | | | | | version 3.7.0
|_____|_____|_____|_|___| https://github.com/nlohmann/json

Licensed under the MIT License <http://opensource.org/licenses/MIT>.

@@ -27,7 +27,7 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/

#include "catch.hpp"
#include "doctest_compatibility.h"

#include <nlohmann/json.hpp>
using nlohmann::json;

@@ -1,7 +1,7 @@
/*
__ _____ _____ _____
__| | __| | | | JSON for Modern C++ (test suite)
| | |__ | | | | | | version 3.6.1
| | |__ | | | | | | version 3.7.0
|_____|_____|_____|_|___| https://github.com/nlohmann/json

Licensed under the MIT License <http://opensource.org/licenses/MIT>.

@@ -27,7 +27,7 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/

#include "catch.hpp"
#include "doctest_compatibility.h"

#include <nlohmann/json.hpp>
using nlohmann::json;

@@ -342,7 +342,7 @@ TEST_CASE("JSON patch")

// check that evaluation throws
CHECK_THROWS_AS(doc.patch(patch), json::other_error&);
CHECK_THROWS_WITH(doc.patch(patch), "[json.exception.other_error.501] unsuccessful: " + patch[0].dump());
CHECK_THROWS_WITH_STD_STR(doc.patch(patch), "[json.exception.other_error.501] unsuccessful: " + patch[0].dump());
}

SECTION("A.10. Adding a Nested Member Object")

@@ -483,7 +483,7 @@ TEST_CASE("JSON patch")

// check that evaluation throws
CHECK_THROWS_AS(doc.patch(patch), json::other_error&);
CHECK_THROWS_WITH(doc.patch(patch), "[json.exception.other_error.501] unsuccessful: " + patch[0].dump());
CHECK_THROWS_WITH_STD_STR(doc.patch(patch), "[json.exception.other_error.501] unsuccessful: " + patch[0].dump());
}

SECTION("A.16. Adding an Array Value")

@@ -1182,7 +1182,7 @@ TEST_CASE("JSON patch")

// the test will fail
CHECK_THROWS_AS(doc.patch(patch), json::other_error&);
CHECK_THROWS_WITH(doc.patch(patch), "[json.exception.other_error.501] unsuccessful: " + patch[0].dump());
CHECK_THROWS_WITH_STD_STR(doc.patch(patch), "[json.exception.other_error.501] unsuccessful: " + patch[0].dump());
}
}
}

@@ -1268,7 +1268,7 @@ TEST_CASE("JSON patch")

for (const auto& test : suite)
{
CAPTURE(test.value("comment", ""))
INFO_WITH_TEMP(test.value("comment", ""));

// skip tests marked as disabled
if (test.value("disabled", false))

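The assertions above now also compare the exception text through a std::string overload of the test macro. For context, a failing "test" operation is what makes patch() throw other_error.501; a compact sketch of that behaviour:

#include <nlohmann/json.hpp>
using nlohmann::json;

int main()
{
    json doc = {{"foo", "bar"}};

    // a "test" operation that does not match makes patch() throw
    // json::other_error (error 501, "unsuccessful")
    json patch = R"([{"op": "test", "path": "/foo", "value": "baz"}])"_json;

    try
    {
        const json patched = doc.patch(patch);
        (void)patched;  // not reached for this patch
    }
    catch (const json::other_error&)
    {
        return 0;  // expected: the test operation failed
    }
    return 1;
}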
@@ -1,7 +1,7 @@
/*
__ _____ _____ _____
__| | __| | | | JSON for Modern C++ (test suite)
| | |__ | | | | | | version 3.6.1
| | |__ | | | | | | version 3.7.0
|_____|_____|_____|_|___| https://github.com/nlohmann/json

Licensed under the MIT License <http://opensource.org/licenses/MIT>.

@@ -27,11 +27,12 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/

#include "catch.hpp"
#include "doctest_compatibility.h"

#define private public
#include <nlohmann/json.hpp>
using nlohmann::json;
#undef private

TEST_CASE("JSON pointers")
{

@@ -89,12 +90,18 @@ TEST_CASE("JSON pointers")
// the whole document
CHECK(j[json::json_pointer()] == j);
CHECK(j[json::json_pointer("")] == j);
CHECK(j.contains(json::json_pointer()));
CHECK(j.contains(json::json_pointer("")));

// array access
CHECK(j[json::json_pointer("/foo")] == j["foo"]);
CHECK(j.contains(json::json_pointer("/foo")));
CHECK(j[json::json_pointer("/foo/0")] == j["foo"][0]);
CHECK(j[json::json_pointer("/foo/1")] == j["foo"][1]);
CHECK(j["/foo/1"_json_pointer] == j["foo"][1]);
CHECK(j.contains(json::json_pointer("/foo/0")));
CHECK(j.contains(json::json_pointer("/foo/1")));
CHECK(not j.contains(json::json_pointer("/foo/-")));

// checked array access
CHECK(j.at(json::json_pointer("/foo/0")) == j["foo"][0]);

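The new checks exercise basic_json::contains with a JSON pointer, which reports whether a path exists without creating elements the way operator[] does. A minimal usage sketch:

#include <nlohmann/json.hpp>
using nlohmann::json;

int main()
{
    json j = {{"foo", {1, 2}}};

    // contains() with a json_pointer only inspects the document
    bool has_first = j.contains(json::json_pointer("/foo/0"));  // true
    bool has_tenth = j.contains("/foo/10"_json_pointer);        // false

    // operator[] with a pointer to a missing path would create it instead
    return has_first && !has_tenth ? 0 : 1;
}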
@ -102,6 +109,8 @@ TEST_CASE("JSON pointers")
|
|||
|
||||
// empty string access
|
||||
CHECK(j[json::json_pointer("/")] == j[""]);
|
||||
CHECK(j.contains(json::json_pointer("")));
|
||||
CHECK(j.contains(json::json_pointer("/")));
|
||||
|
||||
// other cases
|
||||
CHECK(j[json::json_pointer("/ ")] == j[" "]);
|
||||
|
@ -111,6 +120,14 @@ TEST_CASE("JSON pointers")
|
|||
CHECK(j[json::json_pointer("/i\\j")] == j["i\\j"]);
|
||||
CHECK(j[json::json_pointer("/k\"l")] == j["k\"l"]);
|
||||
|
||||
// contains
|
||||
CHECK(j.contains(json::json_pointer("/ ")));
|
||||
CHECK(j.contains(json::json_pointer("/c%d")));
|
||||
CHECK(j.contains(json::json_pointer("/e^f")));
|
||||
CHECK(j.contains(json::json_pointer("/g|h")));
|
||||
CHECK(j.contains(json::json_pointer("/i\\j")));
|
||||
CHECK(j.contains(json::json_pointer("/k\"l")));
|
||||
|
||||
// checked access
|
||||
CHECK(j.at(json::json_pointer("/ ")) == j[" "]);
|
||||
CHECK(j.at(json::json_pointer("/c%d")) == j["c%d"]);
|
||||
|
@ -122,14 +139,24 @@ TEST_CASE("JSON pointers")
|
|||
// escaped access
|
||||
CHECK(j[json::json_pointer("/a~1b")] == j["a/b"]);
|
||||
CHECK(j[json::json_pointer("/m~0n")] == j["m~n"]);
|
||||
CHECK(j.contains(json::json_pointer("/a~1b")));
|
||||
CHECK(j.contains(json::json_pointer("/m~0n")));
|
||||
|
||||
// unescaped access
|
||||
// access to nonexisting values yield object creation
|
||||
CHECK(not j.contains(json::json_pointer("/a/b")));
|
||||
CHECK_NOTHROW(j[json::json_pointer("/a/b")] = 42);
|
||||
CHECK(j.contains(json::json_pointer("/a/b")));
|
||||
CHECK(j["a"]["b"] == json(42));
|
||||
|
||||
CHECK(not j.contains(json::json_pointer("/a/c/1")));
|
||||
CHECK_NOTHROW(j[json::json_pointer("/a/c/1")] = 42);
|
||||
CHECK(j["a"]["c"] == json({nullptr, 42}));
|
||||
CHECK(j.contains(json::json_pointer("/a/c/1")));
|
||||
|
||||
CHECK(not j.contains(json::json_pointer("/a/d/-")));
|
||||
CHECK_NOTHROW(j[json::json_pointer("/a/d/-")] = 42);
|
||||
CHECK(not j.contains(json::json_pointer("/a/d/-")));
|
||||
CHECK(j["a"]["d"] == json::array({42}));
|
||||
// "/a/b" works for JSON {"a": {"b": 42}}
|
||||
CHECK(json({{"a", {{"b", 42}}}})[json::json_pointer("/a/b")] == json(42));
|
||||
|
@ -142,6 +169,7 @@ TEST_CASE("JSON pointers")
|
|||
CHECK_THROWS_AS(j_primitive.at("/foo"_json_pointer), json::out_of_range&);
|
||||
CHECK_THROWS_WITH(j_primitive.at("/foo"_json_pointer),
|
||||
"[json.exception.out_of_range.404] unresolved reference token 'foo'");
|
||||
CHECK(not j_primitive.contains(json::json_pointer("/foo")));
|
||||
}
|
||||
|
||||
SECTION("const access")
|
||||
|
@ -232,11 +260,16 @@ TEST_CASE("JSON pointers")
|
|||
|
||||
// the whole document
|
||||
CHECK(j[""_json_pointer] == j);
|
||||
CHECK(j.contains(""_json_pointer));
|
||||
|
||||
// array access
|
||||
CHECK(j["/foo"_json_pointer] == j["foo"]);
|
||||
CHECK(j["/foo/0"_json_pointer] == j["foo"][0]);
|
||||
CHECK(j["/foo/1"_json_pointer] == j["foo"][1]);
|
||||
CHECK(j.contains("/foo"_json_pointer));
|
||||
CHECK(j.contains("/foo/0"_json_pointer));
|
||||
CHECK(j.contains("/foo/1"_json_pointer));
|
||||
CHECK(not j.contains("/foo/-"_json_pointer));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -277,6 +310,12 @@ TEST_CASE("JSON pointers")
|
|||
CHECK_THROWS_AS(j_const.at("/01"_json_pointer), json::parse_error&);
|
||||
CHECK_THROWS_WITH(j_const.at("/01"_json_pointer),
|
||||
"[json.exception.parse_error.106] parse error: array index '01' must not begin with '0'");
|
||||
CHECK_THROWS_AS(j.contains("/01"_json_pointer), json::parse_error&);
|
||||
CHECK_THROWS_WITH(j.contains("/01"_json_pointer),
|
||||
"[json.exception.parse_error.106] parse error: array index '01' must not begin with '0'");
|
||||
CHECK_THROWS_AS(j_const.contains("/01"_json_pointer), json::parse_error&);
|
||||
CHECK_THROWS_WITH(j_const.contains("/01"_json_pointer),
|
||||
"[json.exception.parse_error.106] parse error: array index '01' must not begin with '0'");
|
||||
|
||||
// error with incorrect numbers
|
||||
CHECK_THROWS_AS(j["/one"_json_pointer] = 1, json::parse_error&);
|
||||
|
@ -293,6 +332,13 @@ TEST_CASE("JSON pointers")
|
|||
CHECK_THROWS_WITH(j_const.at("/one"_json_pointer) == 1,
|
||||
"[json.exception.parse_error.109] parse error: array index 'one' is not a number");
|
||||
|
||||
CHECK_THROWS_AS(j.contains("/one"_json_pointer), json::parse_error&);
|
||||
CHECK_THROWS_WITH(j.contains("/one"_json_pointer),
|
||||
"[json.exception.parse_error.109] parse error: array index 'one' is not a number");
|
||||
CHECK_THROWS_AS(j_const.contains("/one"_json_pointer), json::parse_error&);
|
||||
CHECK_THROWS_WITH(j_const.contains("/one"_json_pointer),
|
||||
"[json.exception.parse_error.109] parse error: array index 'one' is not a number");
|
||||
|
||||
CHECK_THROWS_AS(json({{"/list/0", 1}, {"/list/1", 2}, {"/list/three", 3}}).unflatten(), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json({{"/list/0", 1}, {"/list/1", 2}, {"/list/three", 3}}).unflatten(),
|
||||
"[json.exception.parse_error.109] parse error: array index 'three' is not a number");
|
||||
|
@ -305,6 +351,7 @@ TEST_CASE("JSON pointers")
|
|||
CHECK_THROWS_AS(j_const["/-"_json_pointer], json::out_of_range&);
|
||||
CHECK_THROWS_WITH(j_const["/-"_json_pointer],
|
||||
"[json.exception.out_of_range.402] array index '-' (3) is out of range");
|
||||
CHECK(not j_const.contains("/-"_json_pointer));
|
||||
|
||||
// error when using "-" with at
|
||||
CHECK_THROWS_AS(j.at("/-"_json_pointer), json::out_of_range&);
|
||||
|
@ -313,6 +360,7 @@ TEST_CASE("JSON pointers")
|
|||
CHECK_THROWS_AS(j_const.at("/-"_json_pointer), json::out_of_range&);
|
||||
CHECK_THROWS_WITH(j_const.at("/-"_json_pointer),
|
||||
"[json.exception.out_of_range.402] array index '-' (3) is out of range");
|
||||
CHECK(not j_const.contains("/-"_json_pointer));
|
||||
}
|
||||
|
||||
SECTION("const access")
|
||||
|
@ -328,11 +376,13 @@ TEST_CASE("JSON pointers")
|
|||
CHECK_THROWS_AS(j.at("/3"_json_pointer), json::out_of_range&);
|
||||
CHECK_THROWS_WITH(j.at("/3"_json_pointer),
|
||||
"[json.exception.out_of_range.401] array index 3 is out of range");
|
||||
CHECK(not j.contains("/3"_json_pointer));
|
||||
|
||||
// assign to nonexisting index (with gap)
|
||||
CHECK_THROWS_AS(j.at("/5"_json_pointer), json::out_of_range&);
|
||||
CHECK_THROWS_WITH(j.at("/5"_json_pointer),
|
||||
"[json.exception.out_of_range.401] array index 5 is out of range");
|
||||
CHECK(not j.contains("/5"_json_pointer));
|
||||
|
||||
// assign to "-"
|
||||
CHECK_THROWS_AS(j["/-"_json_pointer], json::out_of_range&);
|
||||
|
@ -341,8 +391,8 @@ TEST_CASE("JSON pointers")
|
|||
CHECK_THROWS_AS(j.at("/-"_json_pointer), json::out_of_range&);
|
||||
CHECK_THROWS_WITH(j.at("/-"_json_pointer),
|
||||
"[json.exception.out_of_range.402] array index '-' (3) is out of range");
|
||||
CHECK(not j.contains("/-"_json_pointer));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
SECTION("flatten")
|
||||
|
|
|
@@ -1,7 +1,7 @@
/*
__ _____ _____ _____
__| | __| | | | JSON for Modern C++ (test suite)
| | |__ | | | | | | version 3.6.1
| | |__ | | | | | | version 3.7.0
|_____|_____|_____|_|___| https://github.com/nlohmann/json

Licensed under the MIT License <http://opensource.org/licenses/MIT>.

@@ -27,7 +27,7 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/

#include "catch.hpp"
#include "doctest_compatibility.h"

#include <nlohmann/json.hpp>
using nlohmann::json;

@@ -1,7 +1,7 @@
/*
__ _____ _____ _____
__| | __| | | | JSON for Modern C++ (test suite)
| | |__ | | | | | | version 3.6.1
| | |__ | | | | | | version 3.7.0
|_____|_____|_____|_|___| https://github.com/nlohmann/json

Licensed under the MIT License <http://opensource.org/licenses/MIT>.

@@ -27,7 +27,7 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/

#include "catch.hpp"
#include "doctest_compatibility.h"

#include <nlohmann/json.hpp>
using nlohmann::json;

@@ -43,10 +43,10 @@ TEST_CASE("version information")
CHECK(j["url"] == "https://github.com/nlohmann/json");
CHECK(j["version"] == json(
{
{"string", "3.6.1"},
{"string", "3.7.0"},
{"major", 3},
{"minor", 6},
{"patch", 1}
{"minor", 7},
{"patch", 0}
}));

CHECK(j.find("platform") != j.end());

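The expected version object above comes from json::meta(), the static member that reports library and platform information. A short sketch:

#include <nlohmann/json.hpp>
#include <iostream>

int main()
{
    // meta() returns an object with version, compiler and platform fields,
    // which is what the version-information test inspects
    const auto info = nlohmann::json::meta();
    std::cout << info["version"]["string"] << '\n';  // e.g. "3.7.0"
    std::cout << info["platform"] << '\n';
}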
@@ -1,7 +1,7 @@
/*
__ _____ _____ _____
__| | __| | | | JSON for Modern C++ (test suite)
| | |__ | | | | | | version 3.6.1
| | |__ | | | | | | version 3.7.0
|_____|_____|_____|_|___| https://github.com/nlohmann/json

Licensed under the MIT License <http://opensource.org/licenses/MIT>.

@@ -27,7 +27,7 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/

#include "catch.hpp"
#include "doctest_compatibility.h"

#include <nlohmann/json.hpp>
using nlohmann::json;

@@ -290,8 +290,10 @@ TEST_CASE("modifiers")
SECTION("null")
{
json j;
j.emplace_back(1);
j.emplace_back(2);
auto& x1 = j.emplace_back(1);
CHECK(x1 == 1);
auto& x2 = j.emplace_back(2);
CHECK(x2 == 2);
CHECK(j.type() == json::value_t::array);
CHECK(j == json({1, 2}));
}

@@ -299,7 +301,8 @@ TEST_CASE("modifiers")
SECTION("array")
{
json j = {1, 2, 3};
j.emplace_back("Hello");
auto& x = j.emplace_back("Hello");
CHECK(x == "Hello");
CHECK(j.type() == json::value_t::array);
CHECK(j == json({1, 2, 3, "Hello"}));
}

@@ -307,7 +310,8 @@ TEST_CASE("modifiers")
SECTION("multiple values")
{
json j;
j.emplace_back(3, "foo");
auto& x = j.emplace_back(3, "foo");
CHECK(x == json({"foo", "foo", "foo"}));
CHECK(j.type() == json::value_t::array);
CHECK(j == json({{"foo", "foo", "foo"}}));
}

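The modified sections rely on emplace_back now returning a reference to the element it created (previously it returned nothing usable). A small sketch of how that reference can be used:

#include <nlohmann/json.hpp>
using nlohmann::json;

int main()
{
    json j;

    // the returned reference allows inspecting or adjusting the new element
    auto& item = j.emplace_back("initial");
    item = "adjusted";

    // forwards to the json(count, value) constructor: ["x", "x", "x"]
    auto& arr = j.emplace_back(3, "x");
    arr.push_back("y");

    return j.size() == 2 ? 0 : 1;
}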
@@ -1,7 +1,7 @@
/*
__ _____ _____ _____
__| | __| | | | JSON for Modern C++ (test suite)
| | |__ | | | | | | version 3.6.1
| | |__ | | | | | | version 3.7.0
|_____|_____|_____|_|___| https://github.com/nlohmann/json

Licensed under the MIT License <http://opensource.org/licenses/MIT>.

@@ -27,13 +27,18 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/

#include "catch.hpp"
#include "doctest_compatibility.h"

#include <nlohmann/json.hpp>
using nlohmann::json;

#include <fstream>
#include <sstream>
#include <iomanip>
#include <set>

namespace
{
class SaxCountdown
{
public:

@@ -103,6 +108,7 @@ class SaxCountdown
private:
int events_left = 0;
};
}

TEST_CASE("MessagePack")
{

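The error-handling hunks below assign the result of from_msgpack before checking the thrown parse_error, and they also exercise the non-throwing mode. A brief sketch of both ways to handle malformed MessagePack input:

#include <nlohmann/json.hpp>
#include <cstdint>
#include <vector>
using nlohmann::json;

int main()
{
    const std::vector<std::uint8_t> truncated = {0xcc};  // uint8 marker without payload

    // throwing mode: incomplete input raises parse_error.110
    try
    {
        json j = json::from_msgpack(truncated);
    }
    catch (const json::parse_error&)
    {
        // expected for this input
    }

    // non-throwing mode: strict checking stays on, exceptions are disabled,
    // and the failure is reported as a discarded value
    json maybe = json::from_msgpack(truncated, true, false);
    return maybe.is_discarded() ? 0 : 1;
}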
@ -1122,71 +1128,73 @@ TEST_CASE("MessagePack")
|
|||
{
|
||||
SECTION("empty byte vector")
|
||||
{
|
||||
CHECK_THROWS_AS(json::from_msgpack(std::vector<uint8_t>()), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_msgpack(std::vector<uint8_t>()),
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_msgpack(std::vector<uint8_t>()), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_msgpack(std::vector<uint8_t>()),
|
||||
"[json.exception.parse_error.110] parse error at byte 1: syntax error while parsing MessagePack value: unexpected end of input");
|
||||
CHECK(json::from_msgpack(std::vector<uint8_t>(), true, false).is_discarded());
|
||||
}
|
||||
|
||||
SECTION("too short byte vector")
|
||||
{
|
||||
CHECK_THROWS_AS(json::from_msgpack(std::vector<uint8_t>({0x87})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_msgpack(std::vector<uint8_t>({0xcc})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_msgpack(std::vector<uint8_t>({0xcd})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_msgpack(std::vector<uint8_t>({0xcd, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_msgpack(std::vector<uint8_t>({0xce})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_msgpack(std::vector<uint8_t>({0xce, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_msgpack(std::vector<uint8_t>({0xce, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_msgpack(std::vector<uint8_t>({0xce, 0x00, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_msgpack(std::vector<uint8_t>({0xcf})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_msgpack(std::vector<uint8_t>({0xcf, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_msgpack(std::vector<uint8_t>({0xcf, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_msgpack(std::vector<uint8_t>({0xcf, 0x00, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_msgpack(std::vector<uint8_t>({0xcf, 0x00, 0x00, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_msgpack(std::vector<uint8_t>({0xcf, 0x00, 0x00, 0x00, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_msgpack(std::vector<uint8_t>({0xcf, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_msgpack(std::vector<uint8_t>({0xcf, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_msgpack(std::vector<uint8_t>({0xa5, 0x68, 0x65})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_msgpack(std::vector<uint8_t>({0x92, 0x01})), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::from_msgpack(std::vector<uint8_t>({0x81, 0xa1, 0x61})), json::parse_error&);
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_msgpack(std::vector<uint8_t>({0x87})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_msgpack(std::vector<uint8_t>({0xcc})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_msgpack(std::vector<uint8_t>({0xcd})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_msgpack(std::vector<uint8_t>({0xcd, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_msgpack(std::vector<uint8_t>({0xce})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_msgpack(std::vector<uint8_t>({0xce, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_msgpack(std::vector<uint8_t>({0xce, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_msgpack(std::vector<uint8_t>({0xce, 0x00, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_msgpack(std::vector<uint8_t>({0xcf})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_msgpack(std::vector<uint8_t>({0xcf, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_msgpack(std::vector<uint8_t>({0xcf, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_msgpack(std::vector<uint8_t>({0xcf, 0x00, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_msgpack(std::vector<uint8_t>({0xcf, 0x00, 0x00, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_msgpack(std::vector<uint8_t>({0xcf, 0x00, 0x00, 0x00, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_msgpack(std::vector<uint8_t>({0xcf, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_msgpack(std::vector<uint8_t>({0xcf, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_msgpack(std::vector<uint8_t>({0xa5, 0x68, 0x65})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_msgpack(std::vector<uint8_t>({0x92, 0x01})), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_msgpack(std::vector<uint8_t>({0x81, 0xa1, 0x61})), json::parse_error&);
|
||||
|
||||
CHECK_THROWS_WITH(json::from_msgpack(std::vector<uint8_t>({0x87})),
|
||||
CHECK_THROWS_WITH(_ = json::from_msgpack(std::vector<uint8_t>({0x87})),
|
||||
"[json.exception.parse_error.110] parse error at byte 2: syntax error while parsing MessagePack string: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_msgpack(std::vector<uint8_t>({0xcc})),
|
||||
CHECK_THROWS_WITH(_ = json::from_msgpack(std::vector<uint8_t>({0xcc})),
|
||||
"[json.exception.parse_error.110] parse error at byte 2: syntax error while parsing MessagePack number: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_msgpack(std::vector<uint8_t>({0xcd})),
|
||||
CHECK_THROWS_WITH(_ = json::from_msgpack(std::vector<uint8_t>({0xcd})),
|
||||
"[json.exception.parse_error.110] parse error at byte 2: syntax error while parsing MessagePack number: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_msgpack(std::vector<uint8_t>({0xcd, 0x00})),
|
||||
CHECK_THROWS_WITH(_ = json::from_msgpack(std::vector<uint8_t>({0xcd, 0x00})),
|
||||
"[json.exception.parse_error.110] parse error at byte 3: syntax error while parsing MessagePack number: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_msgpack(std::vector<uint8_t>({0xce})),
|
||||
CHECK_THROWS_WITH(_ = json::from_msgpack(std::vector<uint8_t>({0xce})),
|
||||
"[json.exception.parse_error.110] parse error at byte 2: syntax error while parsing MessagePack number: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_msgpack(std::vector<uint8_t>({0xce, 0x00})),
|
||||
CHECK_THROWS_WITH(_ = json::from_msgpack(std::vector<uint8_t>({0xce, 0x00})),
|
||||
"[json.exception.parse_error.110] parse error at byte 3: syntax error while parsing MessagePack number: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_msgpack(std::vector<uint8_t>({0xce, 0x00, 0x00})),
|
||||
CHECK_THROWS_WITH(_ = json::from_msgpack(std::vector<uint8_t>({0xce, 0x00, 0x00})),
|
||||
"[json.exception.parse_error.110] parse error at byte 4: syntax error while parsing MessagePack number: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_msgpack(std::vector<uint8_t>({0xce, 0x00, 0x00, 0x00})),
|
||||
CHECK_THROWS_WITH(_ = json::from_msgpack(std::vector<uint8_t>({0xce, 0x00, 0x00, 0x00})),
|
||||
"[json.exception.parse_error.110] parse error at byte 5: syntax error while parsing MessagePack number: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_msgpack(std::vector<uint8_t>({0xcf})),
|
||||
CHECK_THROWS_WITH(_ = json::from_msgpack(std::vector<uint8_t>({0xcf})),
|
||||
"[json.exception.parse_error.110] parse error at byte 2: syntax error while parsing MessagePack number: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_msgpack(std::vector<uint8_t>({0xcf, 0x00})),
|
||||
CHECK_THROWS_WITH(_ = json::from_msgpack(std::vector<uint8_t>({0xcf, 0x00})),
|
||||
"[json.exception.parse_error.110] parse error at byte 3: syntax error while parsing MessagePack number: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_msgpack(std::vector<uint8_t>({0xcf, 0x00, 0x00})),
|
||||
CHECK_THROWS_WITH(_ = json::from_msgpack(std::vector<uint8_t>({0xcf, 0x00, 0x00})),
|
||||
"[json.exception.parse_error.110] parse error at byte 4: syntax error while parsing MessagePack number: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_msgpack(std::vector<uint8_t>({0xcf, 0x00, 0x00, 0x00})),
|
||||
CHECK_THROWS_WITH(_ = json::from_msgpack(std::vector<uint8_t>({0xcf, 0x00, 0x00, 0x00})),
|
||||
"[json.exception.parse_error.110] parse error at byte 5: syntax error while parsing MessagePack number: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_msgpack(std::vector<uint8_t>({0xcf, 0x00, 0x00, 0x00, 0x00})),
|
||||
CHECK_THROWS_WITH(_ = json::from_msgpack(std::vector<uint8_t>({0xcf, 0x00, 0x00, 0x00, 0x00})),
|
||||
"[json.exception.parse_error.110] parse error at byte 6: syntax error while parsing MessagePack number: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_msgpack(std::vector<uint8_t>({0xcf, 0x00, 0x00, 0x00, 0x00, 0x00})),
|
||||
CHECK_THROWS_WITH(_ = json::from_msgpack(std::vector<uint8_t>({0xcf, 0x00, 0x00, 0x00, 0x00, 0x00})),
|
||||
"[json.exception.parse_error.110] parse error at byte 7: syntax error while parsing MessagePack number: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_msgpack(std::vector<uint8_t>({0xcf, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00})),
|
||||
CHECK_THROWS_WITH(_ = json::from_msgpack(std::vector<uint8_t>({0xcf, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00})),
|
||||
"[json.exception.parse_error.110] parse error at byte 8: syntax error while parsing MessagePack number: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_msgpack(std::vector<uint8_t>({0xcf, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00})),
|
||||
CHECK_THROWS_WITH(_ = json::from_msgpack(std::vector<uint8_t>({0xcf, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00})),
|
||||
"[json.exception.parse_error.110] parse error at byte 9: syntax error while parsing MessagePack number: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_msgpack(std::vector<uint8_t>({0xa5, 0x68, 0x65})),
|
||||
CHECK_THROWS_WITH(_ = json::from_msgpack(std::vector<uint8_t>({0xa5, 0x68, 0x65})),
|
||||
"[json.exception.parse_error.110] parse error at byte 4: syntax error while parsing MessagePack string: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_msgpack(std::vector<uint8_t>({0x92, 0x01})),
|
||||
CHECK_THROWS_WITH(_ = json::from_msgpack(std::vector<uint8_t>({0x92, 0x01})),
|
||||
"[json.exception.parse_error.110] parse error at byte 3: syntax error while parsing MessagePack value: unexpected end of input");
|
||||
CHECK_THROWS_WITH(json::from_msgpack(std::vector<uint8_t>({0x81, 0xa1, 0x61})),
|
||||
CHECK_THROWS_WITH(_ = json::from_msgpack(std::vector<uint8_t>({0x81, 0xa1, 0x61})),
|
||||
"[json.exception.parse_error.110] parse error at byte 4: syntax error while parsing MessagePack value: unexpected end of input");
|
||||
|
||||
CHECK(json::from_msgpack(std::vector<uint8_t>({0x87}), true, false).is_discarded());
|
||||
|
@ -1214,13 +1222,14 @@ TEST_CASE("MessagePack")
|
|||
{
|
||||
SECTION("concrete examples")
|
||||
{
|
||||
CHECK_THROWS_AS(json::from_msgpack(std::vector<uint8_t>({0xc1})), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_msgpack(std::vector<uint8_t>({0xc1})),
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_msgpack(std::vector<uint8_t>({0xc1})), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_msgpack(std::vector<uint8_t>({0xc1})),
|
||||
"[json.exception.parse_error.112] parse error at byte 1: syntax error while parsing MessagePack value: invalid byte: 0xC1");
|
||||
CHECK(json::from_msgpack(std::vector<uint8_t>({0xc6}), true, false).is_discarded());
|
||||
|
||||
CHECK_THROWS_AS(json::from_msgpack(std::vector<uint8_t>({0xc6})), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_msgpack(std::vector<uint8_t>({0xc6})),
|
||||
CHECK_THROWS_AS(_ = json::from_msgpack(std::vector<uint8_t>({0xc6})), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_msgpack(std::vector<uint8_t>({0xc6})),
|
||||
"[json.exception.parse_error.112] parse error at byte 1: syntax error while parsing MessagePack value: invalid byte: 0xC6");
|
||||
CHECK(json::from_msgpack(std::vector<uint8_t>({0xc6}), true, false).is_discarded());
|
||||
}
|
||||
|
@ -1239,7 +1248,8 @@ TEST_CASE("MessagePack")
|
|||
0xd4, 0xd5, 0xd6, 0xd7, 0xd8
|
||||
})
|
||||
{
|
||||
CHECK_THROWS_AS(json::from_msgpack(std::vector<uint8_t>({static_cast<uint8_t>(byte)})), json::parse_error&);
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_msgpack(std::vector<uint8_t>({static_cast<uint8_t>(byte)})), json::parse_error&);
|
||||
CHECK(json::from_msgpack(std::vector<uint8_t>({static_cast<uint8_t>(byte)}), true, false).is_discarded());
|
||||
}
|
||||
}
|
||||
|
@ -1247,8 +1257,9 @@ TEST_CASE("MessagePack")
|
|||
|
||||
SECTION("invalid string in map")
|
||||
{
|
||||
CHECK_THROWS_AS(json::from_msgpack(std::vector<uint8_t>({0x81, 0xff, 0x01})), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_msgpack(std::vector<uint8_t>({0x81, 0xff, 0x01})),
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_msgpack(std::vector<uint8_t>({0x81, 0xff, 0x01})), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_msgpack(std::vector<uint8_t>({0x81, 0xff, 0x01})),
|
||||
"[json.exception.parse_error.113] parse error at byte 2: syntax error while parsing MessagePack string: expected length specification (0xA0-0xBF, 0xD9-0xDB); last byte: 0xFF");
|
||||
CHECK(json::from_msgpack(std::vector<uint8_t>({0x81, 0xff, 0x01}), true, false).is_discarded());
|
||||
}
|
||||
|
@ -1264,8 +1275,9 @@ TEST_CASE("MessagePack")
|
|||
|
||||
SECTION("strict mode")
|
||||
{
|
||||
CHECK_THROWS_AS(json::from_msgpack(vec), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_msgpack(vec),
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_msgpack(vec), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_msgpack(vec),
|
||||
"[json.exception.parse_error.110] parse error at byte 2: syntax error while parsing MessagePack value: expected end of input; last byte: 0xC0");
|
||||
CHECK(json::from_msgpack(vec, true, false).is_discarded());
|
||||
}
|
||||
|
@ -1297,7 +1309,6 @@ TEST_CASE("MessagePack")
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
// use this testcase outside [hide] to run it with Valgrind
|
||||
TEST_CASE("single MessagePack roundtrip")
|
||||
{
|
||||
|
@ -1344,8 +1355,7 @@ TEST_CASE("single MessagePack roundtrip")
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
TEST_CASE("MessagePack roundtrips", "[hide]")
|
||||
TEST_CASE("MessagePack roundtrips" * doctest::skip())
|
||||
{
|
||||
SECTION("input from msgpack-python")
|
||||
{
|
||||
|
@ -1519,8 +1529,8 @@ TEST_CASE("MessagePack roundtrips", "[hide]")
|
|||
{
|
||||
CAPTURE(filename)
|
||||
|
||||
SECTION(filename + ": std::vector<uint8_t>")
|
||||
{
|
||||
INFO_WITH_TEMP(filename + ": std::vector<uint8_t>");
|
||||
// parse JSON file
|
||||
std::ifstream f_json(filename);
|
||||
json j1 = json::parse(f_json);
|
||||
|
@ -1537,8 +1547,8 @@ TEST_CASE("MessagePack roundtrips", "[hide]")
|
|||
CHECK(j1 == j2);
|
||||
}
|
||||
|
||||
SECTION(filename + ": std::ifstream")
|
||||
{
|
||||
INFO_WITH_TEMP(filename + ": std::ifstream");
|
||||
// parse JSON file
|
||||
std::ifstream f_json(filename);
|
||||
json j1 = json::parse(f_json);
|
||||
|
@ -1552,8 +1562,8 @@ TEST_CASE("MessagePack roundtrips", "[hide]")
|
|||
CHECK(j1 == j2);
|
||||
}
|
||||
|
||||
SECTION(filename + ": uint8_t* and size")
|
||||
{
|
||||
INFO_WITH_TEMP(filename + ": uint8_t* and size");
|
||||
// parse JSON file
|
||||
std::ifstream f_json(filename);
|
||||
json j1 = json::parse(f_json);
|
||||
|
@ -1570,8 +1580,8 @@ TEST_CASE("MessagePack roundtrips", "[hide]")
|
|||
CHECK(j1 == j2);
|
||||
}
|
||||
|
||||
SECTION(filename + ": output to output adapters")
|
||||
{
|
||||
INFO_WITH_TEMP(filename + ": output to output adapters");
|
||||
// parse JSON file
|
||||
std::ifstream f_json(filename);
|
||||
json j1 = json::parse(f_json);
|
||||
|
@ -1584,8 +1594,8 @@ TEST_CASE("MessagePack roundtrips", "[hide]")
|
|||
|
||||
if (!exclude_packed.count(filename))
|
||||
{
|
||||
SECTION(filename + ": output adapters: std::vector<uint8_t>")
|
||||
{
|
||||
INFO_WITH_TEMP(filename + ": output adapters: std::vector<uint8_t>");
|
||||
std::vector<uint8_t> vec;
|
||||
json::to_msgpack(j1, vec);
|
||||
CHECK(vec == packed);
|
||||
|
|
|
@@ -1,7 +1,7 @@
/*
__ _____ _____ _____
__| | __| | | | JSON for Modern C++ (test suite)
| | |__ | | | | | | version 3.6.1
| | |__ | | | | | | version 3.7.0
|_____|_____|_____|_|___| https://github.com/nlohmann/json

Licensed under the MIT License <http://opensource.org/licenses/MIT>.

@@ -27,11 +27,14 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/

#include "catch.hpp"
#include "doctest_compatibility.h"

#include <nlohmann/json.hpp>

using nlohmann::json;

namespace
{
enum test
{
};

@@ -58,6 +61,7 @@ static_assert(noexcept(json(pod{})), "");
static_assert(noexcept(j.get<pod>()), "");
static_assert(not noexcept(j.get<pod_bis>()), "");
static_assert(noexcept(json(pod{})), "");
}

TEST_CASE("runtime checks")
{

@@ -1,7 +1,7 @@
/*
__ _____ _____ _____
__| | __| | | | JSON for Modern C++ (test suite)
| | |__ | | | | | | version 3.6.1
| | |__ | | | | | | version 3.7.0
|_____|_____|_____|_|___| https://github.com/nlohmann/json

Licensed under the MIT License <http://opensource.org/licenses/MIT>.

@@ -27,7 +27,7 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/

#include "catch.hpp"
#include "doctest_compatibility.h"

#include <nlohmann/json.hpp>
using nlohmann::json;

@@ -1,7 +1,7 @@
/*
__ _____ _____ _____
__| | __| | | | JSON for Modern C++ (test suite)
| | |__ | | | | | | version 3.6.1
| | |__ | | | | | | version 3.7.0
|_____|_____|_____|_|___| https://github.com/nlohmann/json

Licensed under the MIT License <http://opensource.org/licenses/MIT>.

@@ -27,7 +27,8 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/

#include "catch.hpp"
#include "doctest_compatibility.h"
DOCTEST_GCC_SUPPRESS_WARNING("-Wfloat-equal")

#include <nlohmann/json.hpp>
using nlohmann::json;

@@ -35,16 +36,19 @@ using nlohmann::json;
#include <deque>
#include <forward_list>
#include <list>
#include <set>
#include <unordered_map>
#include <unordered_set>
#include <iostream>
#include <sstream>
#include <iomanip>

#if defined(_MSC_VER)
#pragma warning (push)
#pragma warning (disable : 4189) // local variable is initialized but not referenced
#endif

TEST_CASE("README", "[hide]")
TEST_CASE("README" * doctest::skip())
{
{
// redirect std::cout for the README file

@@ -1,7 +1,7 @@
/*
__ _____ _____ _____
__| | __| | | | JSON for Modern C++ (test suite)
| | |__ | | | | | | version 3.6.1
| | |__ | | | | | | version 3.7.0
|_____|_____|_____|_|___| https://github.com/nlohmann/json

Licensed under the MIT License <http://opensource.org/licenses/MIT>.

@@ -27,7 +27,8 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/

#include "catch.hpp"
#include "doctest_compatibility.h"
DOCTEST_GCC_SUPPRESS_WARNING("-Wfloat-equal")

#include <nlohmann/json.hpp>
using nlohmann::json;

@@ -1,7 +1,7 @@
/*
__ _____ _____ _____
__| | __| | | | JSON for Modern C++ (test suite)
| | |__ | | | | | | version 3.6.1
| | |__ | | | | | | version 3.7.0
|_____|_____|_____|_|___| https://github.com/nlohmann/json

Licensed under the MIT License <http://opensource.org/licenses/MIT>.

@@ -27,11 +27,21 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/

#include "catch.hpp"
#include "doctest_compatibility.h"
DOCTEST_GCC_SUPPRESS_WARNING("-Wfloat-equal")

// for some reason including this after the json header leads to linker errors with VS 2017...
#include <locale>

#define private public
#include <nlohmann/json.hpp>
using nlohmann::json;
#undef private

#include <fstream>
#include <sstream>
#include <list>
#include <cstdio>

#if (defined(__cplusplus) && __cplusplus >= 201703L) || (defined(_HAS_CXX17) && _HAS_CXX17 == 1) // fix for issue #464
#define JSON_HAS_CPP_17

@@ -43,10 +53,6 @@ using nlohmann::json;

#include "fifo_map.hpp"

#include <fstream>
#include <list>
#include <cstdio>

/////////////////////////////////////////////////////////////////////
// for #972
/////////////////////////////////////////////////////////////////////

@ -287,7 +293,7 @@ TEST_CASE("regression tests")
|
|||
int number = j["Number"];
|
||||
CHECK(number == 100);
|
||||
float foo = j["Foo"];
|
||||
CHECK(foo == Approx(42.42));
|
||||
CHECK(static_cast<double>(foo) == Approx(42.42));
|
||||
}
|
||||
|
||||
SECTION("issue #89 - nonstandard integer type")
|
||||
|
@ -297,8 +303,7 @@ TEST_CASE("regression tests")
|
|||
nlohmann::basic_json<std::map, std::vector, std::string, bool, int32_t, uint32_t, float>;
|
||||
custom_json j;
|
||||
j["int_1"] = 1;
|
||||
// we need to cast to int to compile with Catch - the value is int32_t
|
||||
CHECK(static_cast<int>(j["int_1"]) == 1);
|
||||
CHECK(j["int_1"] == 1);
|
||||
|
||||
// tests for correct handling of non-standard integers that overflow the type selected by the user
|
||||
|
||||
|
@ -691,8 +696,9 @@ TEST_CASE("regression tests")
|
|||
|
||||
SECTION("issue #329 - serialized value not always can be parsed")
|
||||
{
|
||||
CHECK_THROWS_AS(json::parse("22e2222"), json::out_of_range&);
|
||||
CHECK_THROWS_WITH(json::parse("22e2222"),
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::parse("22e2222"), json::out_of_range&);
|
||||
CHECK_THROWS_WITH(_ = json::parse("22e2222"),
|
||||
"[json.exception.out_of_range.406] number overflow parsing '22e2222'");
|
||||
}
|
||||
|
||||
|
@ -714,7 +720,7 @@ TEST_CASE("regression tests")
|
|||
};
|
||||
|
||||
check_roundtrip(100000000000.1236);
|
||||
check_roundtrip(std::numeric_limits<json::number_float_t>::max());
|
||||
check_roundtrip((std::numeric_limits<json::number_float_t>::max)());
|
||||
|
||||
// Some more numbers which fail to roundtrip when serialized with digits10 significand digits (instead of max_digits10)
|
||||
check_roundtrip(1.541888611948064e-17);
|
||||
|
@ -757,8 +763,9 @@ TEST_CASE("regression tests")
|
|||
SECTION("issue #366 - json::parse on failed stream gets stuck")
|
||||
{
|
||||
std::ifstream f("file_not_found.json");
|
||||
CHECK_THROWS_AS(json::parse(f), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::parse(f), "[json.exception.parse_error.101] parse error at line 1, column 1: syntax error while parsing value - unexpected end of input; expected '[', '{', or a literal");
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::parse(f), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::parse(f), "[json.exception.parse_error.101] parse error at line 1, column 1: syntax error while parsing value - unexpected end of input; expected '[', '{', or a literal");
|
||||
}
|
||||
|
||||
SECTION("issue #367 - calling stream at EOF")
|
||||
|
@ -953,50 +960,55 @@ TEST_CASE("regression tests")
|
|||
{
|
||||
// original test case
|
||||
std::vector<uint8_t> vec {0x65, 0xf5, 0x0a, 0x48, 0x21};
|
||||
CHECK_THROWS_AS(json::from_cbor(vec), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_cbor(vec),
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(vec), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(vec),
|
||||
"[json.exception.parse_error.110] parse error at byte 6: syntax error while parsing CBOR string: unexpected end of input");
|
||||
}
|
||||
|
||||
SECTION("issue #407 - Heap-buffer-overflow (OSS-Fuzz issue 343)")
|
||||
{
|
||||
json _;
|
||||
|
||||
// original test case: incomplete float64
|
||||
std::vector<uint8_t> vec1 {0xcb, 0x8f, 0x0a};
|
||||
CHECK_THROWS_AS(json::from_msgpack(vec1), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_msgpack(vec1),
|
||||
CHECK_THROWS_AS(_ = json::from_msgpack(vec1), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_msgpack(vec1),
|
||||
"[json.exception.parse_error.110] parse error at byte 4: syntax error while parsing MessagePack number: unexpected end of input");
|
||||
|
||||
// related test case: incomplete float32
|
||||
std::vector<uint8_t> vec2 {0xca, 0x8f, 0x0a};
|
||||
CHECK_THROWS_AS(json::from_msgpack(vec2), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_msgpack(vec2),
|
||||
CHECK_THROWS_AS(_ = json::from_msgpack(vec2), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_msgpack(vec2),
|
||||
"[json.exception.parse_error.110] parse error at byte 4: syntax error while parsing MessagePack number: unexpected end of input");
|
||||
|
||||
// related test case: incomplete Half-Precision Float (CBOR)
|
||||
std::vector<uint8_t> vec3 {0xf9, 0x8f};
|
||||
CHECK_THROWS_AS(json::from_cbor(vec3), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_cbor(vec3),
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(vec3), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(vec3),
|
||||
"[json.exception.parse_error.110] parse error at byte 3: syntax error while parsing CBOR number: unexpected end of input");
|
||||
|
||||
// related test case: incomplete Single-Precision Float (CBOR)
|
||||
std::vector<uint8_t> vec4 {0xfa, 0x8f, 0x0a};
|
||||
CHECK_THROWS_AS(json::from_cbor(vec4), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_cbor(vec4),
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(vec4), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(vec4),
|
||||
"[json.exception.parse_error.110] parse error at byte 4: syntax error while parsing CBOR number: unexpected end of input");
|
||||
|
||||
// related test case: incomplete Double-Precision Float (CBOR)
|
||||
std::vector<uint8_t> vec5 {0xfb, 0x8f, 0x0a};
|
||||
CHECK_THROWS_AS(json::from_cbor(vec5), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_cbor(vec5),
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(vec5), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(vec5),
|
||||
"[json.exception.parse_error.110] parse error at byte 4: syntax error while parsing CBOR number: unexpected end of input");
|
||||
}
|
||||
|
||||
SECTION("issue #408 - Heap-buffer-overflow (OSS-Fuzz issue 344)")
|
||||
{
|
||||
json _;
|
||||
|
||||
// original test case
|
||||
std::vector<uint8_t> vec1 {0x87};
|
||||
CHECK_THROWS_AS(json::from_msgpack(vec1), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_msgpack(vec1),
|
||||
CHECK_THROWS_AS(_ = json::from_msgpack(vec1), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_msgpack(vec1),
|
||||
"[json.exception.parse_error.110] parse error at byte 2: syntax error while parsing MessagePack string: unexpected end of input");
|
||||
|
||||
// more test cases for MessagePack
|
||||
|
@ -1009,7 +1021,7 @@ TEST_CASE("regression tests")
|
|||
})
|
||||
{
|
||||
std::vector<uint8_t> vec(1, static_cast<uint8_t>(b));
|
||||
CHECK_THROWS_AS(json::from_msgpack(vec), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_msgpack(vec), json::parse_error&);
|
||||
}
|
||||
|
||||
// more test cases for CBOR
|
||||
|
@ -1024,37 +1036,39 @@ TEST_CASE("regression tests")
|
|||
})
|
||||
{
|
||||
std::vector<uint8_t> vec(1, static_cast<uint8_t>(b));
|
||||
CHECK_THROWS_AS(json::from_cbor(vec), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(vec), json::parse_error&);
|
||||
}
|
||||
|
||||
// special case: empty input
|
||||
std::vector<uint8_t> vec2;
|
||||
CHECK_THROWS_AS(json::from_cbor(vec2), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_cbor(vec2),
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(vec2), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(vec2),
|
||||
"[json.exception.parse_error.110] parse error at byte 1: syntax error while parsing CBOR value: unexpected end of input");
|
||||
CHECK_THROWS_AS(json::from_msgpack(vec2), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_msgpack(vec2),
|
||||
CHECK_THROWS_AS(_ = json::from_msgpack(vec2), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_msgpack(vec2),
|
||||
"[json.exception.parse_error.110] parse error at byte 1: syntax error while parsing MessagePack value: unexpected end of input");
|
||||
}
|
||||
|
||||
SECTION("issue #411 - Heap-buffer-overflow (OSS-Fuzz issue 366)")
|
||||
{
|
||||
json _;
|
||||
|
||||
// original test case: empty UTF-8 string (indefinite length)
|
||||
std::vector<uint8_t> vec1 {0x7f};
|
||||
CHECK_THROWS_AS(json::from_cbor(vec1), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_cbor(vec1),
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(vec1), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(vec1),
|
||||
"[json.exception.parse_error.110] parse error at byte 2: syntax error while parsing CBOR string: unexpected end of input");
|
||||
|
||||
// related test case: empty array (indefinite length)
|
||||
std::vector<uint8_t> vec2 {0x9f};
|
||||
CHECK_THROWS_AS(json::from_cbor(vec2), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_cbor(vec2),
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(vec2), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(vec2),
|
||||
"[json.exception.parse_error.110] parse error at byte 2: syntax error while parsing CBOR value: unexpected end of input");
|
||||
|
||||
// related test case: empty map (indefinite length)
|
||||
std::vector<uint8_t> vec3 {0xbf};
|
||||
CHECK_THROWS_AS(json::from_cbor(vec3), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_cbor(vec3),
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(vec3), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(vec3),
|
||||
"[json.exception.parse_error.110] parse error at byte 2: syntax error while parsing CBOR string: unexpected end of input");
|
||||
}
|
||||
|
||||
|
@ -1081,26 +1095,28 @@ TEST_CASE("regression tests")
|
|||
0x60, 0x60, 0x60, 0x60, 0x60, 0x60, 0x60, 0x60,
|
||||
0x60, 0x60, 0x60, 0x60, 0x60, 0x60, 0x60, 0x60
|
||||
};
|
||||
CHECK_THROWS_AS(json::from_cbor(vec), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_cbor(vec),
|
||||
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(vec), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(vec),
|
||||
"[json.exception.parse_error.113] parse error at byte 2: syntax error while parsing CBOR string: expected length specification (0x60-0x7B) or indefinite string type (0x7F); last byte: 0x98");
|
||||
|
||||
// related test case: nonempty UTF-8 string (indefinite length)
|
||||
std::vector<uint8_t> vec1 {0x7f, 0x61, 0x61};
|
||||
CHECK_THROWS_AS(json::from_cbor(vec1), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_cbor(vec1),
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(vec1), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(vec1),
|
||||
"[json.exception.parse_error.110] parse error at byte 4: syntax error while parsing CBOR string: unexpected end of input");
|
||||
|
||||
// related test case: nonempty array (indefinite length)
|
||||
std::vector<uint8_t> vec2 {0x9f, 0x01};
|
||||
CHECK_THROWS_AS(json::from_cbor(vec2), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_cbor(vec2),
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(vec2), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(vec2),
|
||||
"[json.exception.parse_error.110] parse error at byte 3: syntax error while parsing CBOR value: unexpected end of input");
|
||||
|
||||
// related test case: nonempty map (indefinite length)
|
||||
std::vector<uint8_t> vec3 {0xbf, 0x61, 0x61, 0x01};
|
||||
CHECK_THROWS_AS(json::from_cbor(vec3), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_cbor(vec3),
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(vec3), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(vec3),
|
||||
"[json.exception.parse_error.110] parse error at byte 5: syntax error while parsing CBOR string: unexpected end of input");
|
||||
}
|
||||
|
||||
|
@ -1134,8 +1150,10 @@ TEST_CASE("regression tests")
|
|||
0x96, 0x96, 0xb4, 0xb4, 0xfa, 0x94, 0x94, 0x61,
|
||||
0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0xfa
|
||||
};
|
||||
CHECK_THROWS_AS(json::from_cbor(vec1), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_cbor(vec1),
|
||||
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(vec1), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(vec1),
|
||||
"[json.exception.parse_error.113] parse error at byte 13: syntax error while parsing CBOR string: expected length specification (0x60-0x7B) or indefinite string type (0x7F); last byte: 0xB4");
|
||||
|
||||
// related test case: double-precision
|
||||
|
@ -1148,15 +1166,16 @@ TEST_CASE("regression tests")
|
|||
0x96, 0x96, 0xb4, 0xb4, 0xfa, 0x94, 0x94, 0x61,
|
||||
0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0xfb
|
||||
};
|
||||
CHECK_THROWS_AS(json::from_cbor(vec2), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_cbor(vec2),
|
||||
CHECK_THROWS_AS(_ = json::from_cbor(vec2), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_cbor(vec2),
|
||||
"[json.exception.parse_error.113] parse error at byte 13: syntax error while parsing CBOR string: expected length specification (0x60-0x7B) or indefinite string type (0x7F); last byte: 0xB4");
|
||||
}
|
||||
|
||||
SECTION("issue #452 - Heap-buffer-overflow (OSS-Fuzz issue 585)")
|
||||
{
|
||||
std::vector<uint8_t> vec = {'-', '0', '1', '2', '2', '7', '4'};
|
||||
CHECK_THROWS_AS(json::parse(vec), json::parse_error&);
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::parse(vec), json::parse_error&);
|
||||
}
|
||||
|
||||
SECTION("issue #454 - doubles are printed as integers")
|
||||
|
@ -1304,8 +1323,9 @@ TEST_CASE("regression tests")
|
|||
|
||||
SECTION("issue #575 - heap-buffer-overflow (OSS-Fuzz 1400)")
|
||||
{
|
||||
json _;
|
||||
std::vector<uint8_t> vec = {'"', '\\', '"', 'X', '"', '"'};
|
||||
CHECK_THROWS_AS(json::parse(vec), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::parse(vec), json::parse_error&);
|
||||
}
|
||||
|
||||
SECTION("issue #600 - how does one convert a map in Json back to std::map?")
|
||||
|
@ -1344,7 +1364,8 @@ TEST_CASE("regression tests")
|
|||
SECTION("issue #602 - BOM not skipped when using json:parse(iterator)")
|
||||
{
|
||||
std::string i = "\xef\xbb\xbf{\n \"foo\": true\n}";
|
||||
CHECK_NOTHROW(json::parse(i.begin(), i.end()));
|
||||
json _;
|
||||
CHECK_NOTHROW(_ = json::parse(i.begin(), i.end()));
|
||||
}
|
||||
|
||||
SECTION("issue #702 - conversion from valarray<double> to json fails to build")
|
||||
|
@ -1407,7 +1428,8 @@ TEST_CASE("regression tests")
|
|||
); // handle different exceptions as 'file not found', 'permission denied'
|
||||
|
||||
is.open("test/data/regression/working_file.json");
|
||||
CHECK_NOTHROW(nlohmann::json::parse(is));
|
||||
json _;
|
||||
CHECK_NOTHROW(_ = nlohmann::json::parse(is));
|
||||
}
|
||||
|
||||
{
|
||||
|
@ -1420,7 +1442,8 @@ TEST_CASE("regression tests")
|
|||
|
||||
is.open("test/data/json_nlohmann_tests/all_unicode.json.cbor",
|
||||
std::ios_base::in | std::ios_base::binary);
|
||||
CHECK_NOTHROW(nlohmann::json::from_cbor(is));
|
||||
json _;
|
||||
CHECK_NOTHROW(_ = nlohmann::json::from_cbor(is));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1500,13 +1523,14 @@ TEST_CASE("regression tests")
|
|||
|
||||
SECTION("issue #962 - Timeout (OSS-Fuzz 6034)")
|
||||
{
|
||||
json _;
|
||||
std::vector<uint8_t> v_ubjson = {'[', '$', 'Z', '#', 'L', 0x78, 0x28, 0x00, 0x68, 0x28, 0x69, 0x69, 0x17};
|
||||
CHECK_THROWS_AS(json::from_ubjson(v_ubjson), json::out_of_range&);
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(v_ubjson), json::out_of_range&);
|
||||
//CHECK_THROWS_WITH(json::from_ubjson(v_ubjson),
|
||||
// "[json.exception.out_of_range.408] excessive array size: 8658170730974374167");
|
||||
|
||||
v_ubjson[0] = '{';
|
||||
CHECK_THROWS_AS(json::from_ubjson(v_ubjson), json::out_of_range&);
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(v_ubjson), json::out_of_range&);
|
||||
//CHECK_THROWS_WITH(json::from_ubjson(v_ubjson),
|
||||
// "[json.exception.out_of_range.408] excessive object size: 8658170730974374167");
|
||||
}
|
||||
|
@ -1780,7 +1804,8 @@ TEST_CASE("regression tests")
|
|||
}
|
||||
}
|
||||
|
||||
TEST_CASE("regression tests, exceptions dependent", "[!throws]")
|
||||
#if not defined(JSON_NOEXCEPTION)
|
||||
TEST_CASE("regression tests, exceptions dependent")
|
||||
{
|
||||
SECTION("issue #1340 - eof not set on exhausted input stream")
|
||||
{
|
||||
|
@ -1792,3 +1817,14 @@ TEST_CASE("regression tests, exceptions dependent", "[!throws]")
|
|||
CHECK(s.eof());
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
/////////////////////////////////////////////////////////////////////
|
||||
// for #1642
|
||||
/////////////////////////////////////////////////////////////////////
|
||||
template <typename T> class array {};
|
||||
template <typename T> class object {};
|
||||
template <typename T> class string {};
|
||||
template <typename T> class number_integer {};
|
||||
template <typename T> class number_unsigned {};
|
||||
template <typename T> class number_float {};
|
||||
|
|
|
@@ -1,7 +1,7 @@
/*
__ _____ _____ _____
__| | __| | | | JSON for Modern C++ (test suite)
| | |__ | | | | | | version 3.6.1
| | |__ | | | | | | version 3.7.0
|_____|_____|_____|_|___| https://github.com/nlohmann/json

Licensed under the MIT License <http://opensource.org/licenses/MIT>.

@@ -27,11 +27,14 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/

#include "catch.hpp"
#include "doctest_compatibility.h"

#include <nlohmann/json.hpp>
using nlohmann::json;

#include <sstream>
#include <iomanip>

TEST_CASE("serialization")
{
SECTION("operator<<")

@@ -170,4 +173,19 @@ TEST_CASE("serialization")
test("\xE1\x80\xE2\xF0\x91\x92\xF1\xBF\x41", "\\ufffd" "\\ufffd" "\\ufffd" "\\ufffd" "\x41");
}
}

SECTION("to_string")
{
auto test = [&](std::string const & input, std::string const & expected)
{
using std::to_string;
json j = input;
CHECK(to_string(j) == "\"" + expected + "\"");
};

test("{\"x\":5,\"y\":6}", "{\\\"x\\\":5,\\\"y\\\":6}");
test("{\"x\":[10,null,null,null]}", "{\\\"x\\\":[10,null,null,null]}");
test("test", "test");
test("[3,\"false\",false]", "[3,\\\"false\\\",false]");
}
}

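The new to_string section exercises the free function to_string(json), which serializes like dump() and is found through argument-dependent lookup. A short sketch:

#include <nlohmann/json.hpp>
#include <iostream>
#include <string>

int main()
{
    nlohmann::json j = {{"x", 5}, {"y", 6}};

    // ADL picks the nlohmann overload for json arguments,
    // while std::to_string keeps working for arithmetic types
    using std::to_string;
    std::cout << to_string(j) << '\n';   // {"x":5,"y":6}
    std::cout << to_string(42) << '\n';  // 42
}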
@@ -1,7 +1,7 @@
/*
__ _____ _____ _____
__| | __| | | | JSON for Modern C++ (test suite)
| | |__ | | | | | | version 3.6.1
| | |__ | | | | | | version 3.7.0
|_____|_____|_____|_|___| https://github.com/nlohmann/json

Licensed under the MIT License <http://opensource.org/licenses/MIT>.

@@ -27,7 +27,7 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/

#include "catch.hpp"
#include "doctest_compatibility.h"

#include <nlohmann/json.hpp>
using nlohmann::json;

@@ -79,7 +79,8 @@ TEST_CASE("compliance tests from json.org")
{
CAPTURE(filename)
std::ifstream f(filename);
CHECK_THROWS_AS(json::parse(f), json::parse_error&);
json _;
CHECK_THROWS_AS(_ = json::parse(f), json::parse_error&);
}
}

@ -387,38 +388,37 @@ TEST_CASE("json.org examples")
|
|||
SECTION("FILE 1.json")
|
||||
{
|
||||
std::unique_ptr<std::FILE, decltype(&std::fclose)> f(std::fopen("test/data/json.org/1.json", "r"), &std::fclose);
|
||||
json j;
|
||||
CHECK_NOTHROW(j.parse(f.get()));
|
||||
json _;
|
||||
CHECK_NOTHROW(_ = json::parse(f.get()));
|
||||
}
|
||||
|
||||
SECTION("FILE 2.json")
|
||||
{
|
||||
std::unique_ptr<std::FILE, decltype(&std::fclose)> f(std::fopen("test/data/json.org/2.json", "r"), &std::fclose);
|
||||
json j;
|
||||
CHECK_NOTHROW(j.parse(f.get()));
|
||||
json _;
|
||||
CHECK_NOTHROW(_ = json::parse(f.get()));
|
||||
}
|
||||
|
||||
SECTION("FILE 3.json")
|
||||
{
|
||||
std::unique_ptr<std::FILE, decltype(&std::fclose)> f(std::fopen("test/data/json.org/3.json", "r"), &std::fclose);
|
||||
json j;
|
||||
CHECK_NOTHROW(j.parse(f.get()));
|
||||
json _;
|
||||
CHECK_NOTHROW(_ = json::parse(f.get()));
|
||||
}
|
||||
|
||||
SECTION("FILE 4.json")
|
||||
{
|
||||
std::unique_ptr<std::FILE, decltype(&std::fclose)> f(std::fopen("test/data/json.org/4.json", "r"), &std::fclose);
|
||||
json j;
|
||||
CHECK_NOTHROW(j.parse(f.get()));
|
||||
json _;
|
||||
CHECK_NOTHROW(_ = json::parse(f.get()));
|
||||
}
|
||||
|
||||
SECTION("FILE 5.json")
|
||||
{
|
||||
std::unique_ptr<std::FILE, decltype(&std::fclose)> f(std::fopen("test/data/json.org/5.json", "r"), &std::fclose);
|
||||
json j;
|
||||
CHECK_NOTHROW(j.parse(f.get()));
|
||||
json _;
|
||||
CHECK_NOTHROW(_ = json::parse(f.get()));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
TEST_CASE("RFC 7159 examples")
|
||||
|
@ -811,7 +811,8 @@ TEST_CASE("nst's JSONTestSuite")
|
|||
{
|
||||
CAPTURE(filename)
|
||||
std::ifstream f(filename);
|
||||
CHECK_THROWS_AS(json::parse(f), json::parse_error&);
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::parse(f), json::parse_error&);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1028,7 +1029,8 @@ TEST_CASE("nst's JSONTestSuite (2)")
|
|||
{
|
||||
CAPTURE(filename)
|
||||
std::ifstream f(filename);
|
||||
CHECK_NOTHROW(json::parse(f));
|
||||
json _;
|
||||
CHECK_NOTHROW(_ = json::parse(f));
|
||||
std::ifstream f2(filename);
|
||||
CHECK(json::accept(f2));
|
||||
}
|
||||
|
@ -1229,7 +1231,8 @@ TEST_CASE("nst's JSONTestSuite (2)")
|
|||
{
|
||||
CAPTURE(filename)
|
||||
std::ifstream f(filename);
|
||||
CHECK_THROWS_AS(json::parse(f), json::parse_error&);
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::parse(f), json::parse_error&);
|
||||
std::ifstream f2(filename);
|
||||
CHECK(not json::accept(f2));
|
||||
}
|
||||
|
@ -1294,7 +1297,8 @@ TEST_CASE("nst's JSONTestSuite (2)")
|
|||
{
|
||||
CAPTURE(filename)
|
||||
std::ifstream f(filename);
|
||||
CHECK_NOTHROW(json::parse(f));
|
||||
json _;
|
||||
CHECK_NOTHROW(_ = json::parse(f));
|
||||
std::ifstream f2(filename);
|
||||
CHECK(json::accept(f2));
|
||||
}
|
||||
|
@ -1344,7 +1348,8 @@ TEST_CASE("nst's JSONTestSuite (2)")
|
|||
{
|
||||
CAPTURE(filename)
|
||||
std::ifstream f(filename);
|
||||
CHECK_THROWS_AS(json::parse(f), json::exception&); // could be parse_error or out_of_range
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::parse(f), json::exception&); // could be parse_error or out_of_range
|
||||
std::ifstream f2(filename);
|
||||
CHECK(not json::accept(f2));
|
||||
}
|
||||
|
@ -1352,6 +1357,8 @@ TEST_CASE("nst's JSONTestSuite (2)")
|
|||
}
|
||||
}
|
||||
|
||||
namespace
|
||||
{
|
||||
std::string trim(const std::string& str);
|
||||
|
||||
// from http://stackoverflow.com/a/25829178/266378
|
||||
|
@ -1365,6 +1372,7 @@ std::string trim(const std::string& str)
|
|||
size_t last = str.find_last_not_of(' ');
|
||||
return str.substr(first, (last - first + 1));
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE("Big List of Naughty Strings")
|
||||
{
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/*
|
||||
__ _____ _____ _____
|
||||
__| | __| | | | JSON for Modern C++ (test suite)
|
||||
| | |__ | | | | | | version 3.6.1
|
||||
| | |__ | | | | | | version 3.7.0
|
||||
|_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
|
||||
Licensed under the MIT License <http://opensource.org/licenses/MIT>.
|
||||
|
@ -31,11 +31,13 @@ SOFTWARE.
|
|||
// Only compile these tests if 'float' and 'double' are IEEE-754 single- and
|
||||
// double-precision numbers, resp.
|
||||
|
||||
#include "catch.hpp"
|
||||
#include "doctest_compatibility.h"
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
using nlohmann::detail::dtoa_impl::reinterpret_bits;
|
||||
|
||||
namespace
|
||||
{
|
||||
static float make_float(uint32_t sign_bit, uint32_t biased_exponent, uint32_t significand)
|
||||
{
|
||||
assert(sign_bit == 0 || sign_bit == 1);
|
||||
|
@ -139,6 +141,7 @@ static double make_double(uint64_t f, int e)
|
|||
uint64_t bits = (f & kSignificandMask) | (biased_exponent << kPhysicalSignificandSize);
|
||||
return reinterpret_bits<double>(bits);
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE("digit gen")
|
||||
{
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/*
|
||||
__ _____ _____ _____
|
||||
__| | __| | | | JSON for Modern C++ (test suite)
|
||||
| | |__ | | | | | | version 3.6.1
|
||||
| | |__ | | | | | | version 3.7.0
|
||||
|_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
|
||||
Licensed under the MIT License <http://opensource.org/licenses/MIT>.
|
||||
|
@ -27,13 +27,16 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|||
SOFTWARE.
|
||||
*/
|
||||
|
||||
#include "catch.hpp"
|
||||
#include "doctest_compatibility.h"
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
using nlohmann::json;
|
||||
|
||||
#include <fstream>
|
||||
#include <set>
|
||||
|
||||
namespace
|
||||
{
|
||||
class SaxCountdown
|
||||
{
|
||||
public:
|
||||
|
@ -103,6 +106,7 @@ class SaxCountdown
|
|||
private:
|
||||
int events_left = 0;
|
||||
};
|
||||
}
|
||||
|
||||
TEST_CASE("UBJSON")
|
||||
{
|
||||
|
@ -1297,8 +1301,9 @@ TEST_CASE("UBJSON")
|
|||
|
||||
SECTION("strict mode")
|
||||
{
|
||||
CHECK_THROWS_AS(json::from_ubjson(vec), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_ubjson(vec),
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(vec), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_ubjson(vec),
|
||||
"[json.exception.parse_error.110] parse error at byte 2: syntax error while parsing UBJSON value: expected end of input; last byte: 0x5A");
|
||||
}
|
||||
}
|
||||
|
@ -1307,8 +1312,9 @@ TEST_CASE("UBJSON")
|
|||
{
|
||||
// larger than max int64
|
||||
json j = 9223372036854775808llu;
|
||||
CHECK_THROWS_AS(json::to_ubjson(j), json::out_of_range&);
|
||||
CHECK_THROWS_WITH(json::to_ubjson(j), "[json.exception.out_of_range.407] integer number 9223372036854775808 cannot be represented by UBJSON as it does not fit int64");
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::to_ubjson(j), json::out_of_range&);
|
||||
CHECK_THROWS_WITH(_ = json::to_ubjson(j), "[json.exception.out_of_range.407] integer number 9223372036854775808 cannot be represented by UBJSON as it does not fit int64");
|
||||
}
|
||||
|
||||
SECTION("excessive size")
|
||||
|
@ -1316,27 +1322,29 @@ TEST_CASE("UBJSON")
|
|||
SECTION("array")
|
||||
{
|
||||
std::vector<uint8_t> v_ubjson = {'[', '$', 'Z', '#', 'L', 0x78, 0x28, 0x00, 0x68, 0x28, 0x69, 0x69, 0x17};
|
||||
CHECK_THROWS_AS(json::from_ubjson(v_ubjson), json::out_of_range&);
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(v_ubjson), json::out_of_range&);
|
||||
|
||||
json j;
|
||||
nlohmann::detail::json_sax_dom_callback_parser<json> scp(j, [](int, json::parse_event_t, const json&)
|
||||
{
|
||||
return true;
|
||||
});
|
||||
CHECK_THROWS_AS(json::sax_parse(v_ubjson, &scp, json::input_format_t::ubjson), json::out_of_range&);
|
||||
CHECK_THROWS_AS(_ = json::sax_parse(v_ubjson, &scp, json::input_format_t::ubjson), json::out_of_range&);
|
||||
}
|
||||
|
||||
SECTION("object")
|
||||
{
|
||||
std::vector<uint8_t> v_ubjson = {'{', '$', 'Z', '#', 'L', 0x78, 0x28, 0x00, 0x68, 0x28, 0x69, 0x69, 0x17};
|
||||
CHECK_THROWS_AS(json::from_ubjson(v_ubjson), json::out_of_range&);
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(v_ubjson), json::out_of_range&);
|
||||
|
||||
json j;
|
||||
nlohmann::detail::json_sax_dom_callback_parser<json> scp(j, [](int, json::parse_event_t, const json&)
|
||||
{
|
||||
return true;
|
||||
});
|
||||
CHECK_THROWS_AS(json::sax_parse(v_ubjson, &scp, json::input_format_t::ubjson), json::out_of_range&);
|
||||
CHECK_THROWS_AS(_ = json::sax_parse(v_ubjson, &scp, json::input_format_t::ubjson), json::out_of_range&);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1527,8 +1535,9 @@ TEST_CASE("UBJSON")
|
|||
{
|
||||
SECTION("empty byte vector")
|
||||
{
|
||||
CHECK_THROWS_AS(json::from_ubjson(std::vector<uint8_t>()), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_ubjson(std::vector<uint8_t>()),
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(std::vector<uint8_t>()), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_ubjson(std::vector<uint8_t>()),
|
||||
"[json.exception.parse_error.110] parse error at byte 1: syntax error while parsing UBJSON value: unexpected end of input");
|
||||
}
|
||||
|
||||
|
@ -1537,15 +1546,17 @@ TEST_CASE("UBJSON")
|
|||
SECTION("eof after C byte")
|
||||
{
|
||||
std::vector<uint8_t> v = {'C'};
|
||||
CHECK_THROWS_AS(json::from_ubjson(v), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_ubjson(v), "[json.exception.parse_error.110] parse error at byte 2: syntax error while parsing UBJSON char: unexpected end of input");
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(v), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_ubjson(v), "[json.exception.parse_error.110] parse error at byte 2: syntax error while parsing UBJSON char: unexpected end of input");
|
||||
}
|
||||
|
||||
SECTION("byte out of range")
|
||||
{
|
||||
std::vector<uint8_t> v = {'C', 130};
|
||||
CHECK_THROWS_AS(json::from_ubjson(v), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_ubjson(v), "[json.exception.parse_error.113] parse error at byte 2: syntax error while parsing UBJSON char: byte after 'C' must be in range 0x00..0x7F; last byte: 0x82");
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(v), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_ubjson(v), "[json.exception.parse_error.113] parse error at byte 2: syntax error while parsing UBJSON char: byte after 'C' must be in range 0x00..0x7F; last byte: 0x82");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1554,15 +1565,17 @@ TEST_CASE("UBJSON")
|
|||
SECTION("eof after S byte")
|
||||
{
|
||||
std::vector<uint8_t> v = {'S'};
|
||||
CHECK_THROWS_AS(json::from_ubjson(v), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_ubjson(v), "[json.exception.parse_error.110] parse error at byte 2: syntax error while parsing UBJSON value: unexpected end of input");
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(v), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_ubjson(v), "[json.exception.parse_error.110] parse error at byte 2: syntax error while parsing UBJSON value: unexpected end of input");
|
||||
}
|
||||
|
||||
SECTION("invalid byte")
|
||||
{
|
||||
std::vector<uint8_t> v = {'S', '1', 'a'};
|
||||
CHECK_THROWS_AS(json::from_ubjson(v), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_ubjson(v), "[json.exception.parse_error.113] parse error at byte 2: syntax error while parsing UBJSON string: expected length type specification (U, i, I, l, L); last byte: 0x31");
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(v), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_ubjson(v), "[json.exception.parse_error.113] parse error at byte 2: syntax error while parsing UBJSON string: expected length type specification (U, i, I, l, L); last byte: 0x31");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1571,138 +1584,144 @@ TEST_CASE("UBJSON")
|
|||
SECTION("optimized array: no size following type")
|
||||
{
|
||||
std::vector<uint8_t> v = {'[', '$', 'i', 2};
|
||||
CHECK_THROWS_AS(json::from_ubjson(v), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_ubjson(v), "[json.exception.parse_error.112] parse error at byte 4: syntax error while parsing UBJSON size: expected '#' after type information; last byte: 0x02");
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(v), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_ubjson(v), "[json.exception.parse_error.112] parse error at byte 4: syntax error while parsing UBJSON size: expected '#' after type information; last byte: 0x02");
|
||||
}
|
||||
}
|
||||
|
||||
SECTION("strings")
|
||||
{
|
||||
std::vector<uint8_t> vS = {'S'};
|
||||
CHECK_THROWS_AS(json::from_ubjson(vS), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_ubjson(vS), "[json.exception.parse_error.110] parse error at byte 2: syntax error while parsing UBJSON value: unexpected end of input");
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(vS), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_ubjson(vS), "[json.exception.parse_error.110] parse error at byte 2: syntax error while parsing UBJSON value: unexpected end of input");
|
||||
CHECK(json::from_ubjson(vS, true, false).is_discarded());
|
||||
|
||||
std::vector<uint8_t> v = {'S', 'i', '2', 'a'};
|
||||
CHECK_THROWS_AS(json::from_ubjson(v), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_ubjson(v), "[json.exception.parse_error.110] parse error at byte 5: syntax error while parsing UBJSON string: unexpected end of input");
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(v), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_ubjson(v), "[json.exception.parse_error.110] parse error at byte 5: syntax error while parsing UBJSON string: unexpected end of input");
|
||||
CHECK(json::from_ubjson(v, true, false).is_discarded());
|
||||
|
||||
std::vector<uint8_t> vC = {'C'};
|
||||
CHECK_THROWS_AS(json::from_ubjson(vC), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_ubjson(vC), "[json.exception.parse_error.110] parse error at byte 2: syntax error while parsing UBJSON char: unexpected end of input");
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(vC), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_ubjson(vC), "[json.exception.parse_error.110] parse error at byte 2: syntax error while parsing UBJSON char: unexpected end of input");
|
||||
CHECK(json::from_ubjson(vC, true, false).is_discarded());
|
||||
}
|
||||
|
||||
SECTION("sizes")
|
||||
{
|
||||
std::vector<uint8_t> vU = {'[', '#', 'U'};
|
||||
CHECK_THROWS_AS(json::from_ubjson(vU), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_ubjson(vU), "[json.exception.parse_error.110] parse error at byte 4: syntax error while parsing UBJSON number: unexpected end of input");
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(vU), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_ubjson(vU), "[json.exception.parse_error.110] parse error at byte 4: syntax error while parsing UBJSON number: unexpected end of input");
|
||||
CHECK(json::from_ubjson(vU, true, false).is_discarded());
|
||||
|
||||
std::vector<uint8_t> vi = {'[', '#', 'i'};
|
||||
CHECK_THROWS_AS(json::from_ubjson(vi), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_ubjson(vi), "[json.exception.parse_error.110] parse error at byte 4: syntax error while parsing UBJSON number: unexpected end of input");
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(vi), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_ubjson(vi), "[json.exception.parse_error.110] parse error at byte 4: syntax error while parsing UBJSON number: unexpected end of input");
|
||||
CHECK(json::from_ubjson(vi, true, false).is_discarded());
|
||||
|
||||
std::vector<uint8_t> vI = {'[', '#', 'I'};
|
||||
CHECK_THROWS_AS(json::from_ubjson(vI), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_ubjson(vI), "[json.exception.parse_error.110] parse error at byte 4: syntax error while parsing UBJSON number: unexpected end of input");
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(vI), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_ubjson(vI), "[json.exception.parse_error.110] parse error at byte 4: syntax error while parsing UBJSON number: unexpected end of input");
|
||||
CHECK(json::from_ubjson(vI, true, false).is_discarded());
|
||||
|
||||
std::vector<uint8_t> vl = {'[', '#', 'l'};
|
||||
CHECK_THROWS_AS(json::from_ubjson(vl), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_ubjson(vl), "[json.exception.parse_error.110] parse error at byte 4: syntax error while parsing UBJSON number: unexpected end of input");
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(vl), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_ubjson(vl), "[json.exception.parse_error.110] parse error at byte 4: syntax error while parsing UBJSON number: unexpected end of input");
|
||||
CHECK(json::from_ubjson(vl, true, false).is_discarded());
|
||||
|
||||
std::vector<uint8_t> vL = {'[', '#', 'L'};
|
||||
CHECK_THROWS_AS(json::from_ubjson(vL), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_ubjson(vL), "[json.exception.parse_error.110] parse error at byte 4: syntax error while parsing UBJSON number: unexpected end of input");
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(vL), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_ubjson(vL), "[json.exception.parse_error.110] parse error at byte 4: syntax error while parsing UBJSON number: unexpected end of input");
|
||||
CHECK(json::from_ubjson(vL, true, false).is_discarded());
|
||||
|
||||
std::vector<uint8_t> v0 = {'[', '#', 'T', ']'};
|
||||
CHECK_THROWS_AS(json::from_ubjson(v0), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_ubjson(v0), "[json.exception.parse_error.113] parse error at byte 3: syntax error while parsing UBJSON size: expected length type specification (U, i, I, l, L) after '#'; last byte: 0x54");
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(v0), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_ubjson(v0), "[json.exception.parse_error.113] parse error at byte 3: syntax error while parsing UBJSON size: expected length type specification (U, i, I, l, L) after '#'; last byte: 0x54");
|
||||
CHECK(json::from_ubjson(v0, true, false).is_discarded());
|
||||
}
|
||||
|
||||
SECTION("types")
|
||||
{
|
||||
std::vector<uint8_t> v0 = {'[', '$'};
|
||||
CHECK_THROWS_AS(json::from_ubjson(v0), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_ubjson(v0), "[json.exception.parse_error.110] parse error at byte 3: syntax error while parsing UBJSON type: unexpected end of input");
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(v0), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_ubjson(v0), "[json.exception.parse_error.110] parse error at byte 3: syntax error while parsing UBJSON type: unexpected end of input");
|
||||
CHECK(json::from_ubjson(v0, true, false).is_discarded());
|
||||
|
||||
std::vector<uint8_t> vi = {'[', '$', '#'};
|
||||
CHECK_THROWS_AS(json::from_ubjson(vi), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_ubjson(vi), "[json.exception.parse_error.110] parse error at byte 4: syntax error while parsing UBJSON value: unexpected end of input");
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(vi), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_ubjson(vi), "[json.exception.parse_error.110] parse error at byte 4: syntax error while parsing UBJSON value: unexpected end of input");
|
||||
CHECK(json::from_ubjson(vi, true, false).is_discarded());
|
||||
|
||||
std::vector<uint8_t> vT = {'[', '$', 'T'};
|
||||
CHECK_THROWS_AS(json::from_ubjson(vT), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_ubjson(vT), "[json.exception.parse_error.110] parse error at byte 4: syntax error while parsing UBJSON value: unexpected end of input");
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(vT), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_ubjson(vT), "[json.exception.parse_error.110] parse error at byte 4: syntax error while parsing UBJSON value: unexpected end of input");
|
||||
CHECK(json::from_ubjson(vT, true, false).is_discarded());
|
||||
}
|
||||
|
||||
SECTION("arrays")
|
||||
{
|
||||
std::vector<uint8_t> vST = {'[', '$', 'i', '#', 'i', 2, 1};
|
||||
CHECK_THROWS_AS(json::from_ubjson(vST), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_ubjson(vST), "[json.exception.parse_error.110] parse error at byte 8: syntax error while parsing UBJSON number: unexpected end of input");
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(vST), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_ubjson(vST), "[json.exception.parse_error.110] parse error at byte 8: syntax error while parsing UBJSON number: unexpected end of input");
|
||||
CHECK(json::from_ubjson(vST, true, false).is_discarded());
|
||||
|
||||
std::vector<uint8_t> vS = {'[', '#', 'i', 2, 'i', 1};
|
||||
CHECK_THROWS_AS(json::from_ubjson(vS), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_ubjson(vS), "[json.exception.parse_error.110] parse error at byte 7: syntax error while parsing UBJSON value: unexpected end of input");
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(vS), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_ubjson(vS), "[json.exception.parse_error.110] parse error at byte 7: syntax error while parsing UBJSON value: unexpected end of input");
|
||||
CHECK(json::from_ubjson(vS, true, false).is_discarded());
|
||||
|
||||
std::vector<uint8_t> v = {'[', 'i', 2, 'i', 1};
|
||||
CHECK_THROWS_AS(json::from_ubjson(v), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_ubjson(v), "[json.exception.parse_error.110] parse error at byte 6: syntax error while parsing UBJSON value: unexpected end of input");
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(v), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_ubjson(v), "[json.exception.parse_error.110] parse error at byte 6: syntax error while parsing UBJSON value: unexpected end of input");
|
||||
CHECK(json::from_ubjson(v, true, false).is_discarded());
|
||||
}
|
||||
|
||||
SECTION("objects")
|
||||
{
|
||||
std::vector<uint8_t> vST = {'{', '$', 'i', '#', 'i', 2, 'i', 1, 'a', 1};
|
||||
CHECK_THROWS_AS(json::from_ubjson(vST), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_ubjson(vST), "[json.exception.parse_error.110] parse error at byte 11: syntax error while parsing UBJSON value: unexpected end of input");
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(vST), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_ubjson(vST), "[json.exception.parse_error.110] parse error at byte 11: syntax error while parsing UBJSON value: unexpected end of input");
|
||||
CHECK(json::from_ubjson(vST, true, false).is_discarded());
|
||||
|
||||
std::vector<uint8_t> vT = {'{', '$', 'i', 'i', 1, 'a', 1};
|
||||
CHECK_THROWS_AS(json::from_ubjson(vT), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_ubjson(vT), "[json.exception.parse_error.112] parse error at byte 4: syntax error while parsing UBJSON size: expected '#' after type information; last byte: 0x69");
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(vT), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_ubjson(vT), "[json.exception.parse_error.112] parse error at byte 4: syntax error while parsing UBJSON size: expected '#' after type information; last byte: 0x69");
|
||||
CHECK(json::from_ubjson(vT, true, false).is_discarded());
|
||||
|
||||
std::vector<uint8_t> vS = {'{', '#', 'i', 2, 'i', 1, 'a', 'i', 1};
|
||||
CHECK_THROWS_AS(json::from_ubjson(vS), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_ubjson(vS), "[json.exception.parse_error.110] parse error at byte 10: syntax error while parsing UBJSON value: unexpected end of input");
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(vS), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_ubjson(vS), "[json.exception.parse_error.110] parse error at byte 10: syntax error while parsing UBJSON value: unexpected end of input");
|
||||
CHECK(json::from_ubjson(vS, true, false).is_discarded());
|
||||
|
||||
std::vector<uint8_t> v = {'{', 'i', 1, 'a', 'i', 1};
|
||||
CHECK_THROWS_AS(json::from_ubjson(v), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_ubjson(v), "[json.exception.parse_error.110] parse error at byte 7: syntax error while parsing UBJSON value: unexpected end of input");
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(v), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_ubjson(v), "[json.exception.parse_error.110] parse error at byte 7: syntax error while parsing UBJSON value: unexpected end of input");
|
||||
CHECK(json::from_ubjson(v, true, false).is_discarded());
|
||||
|
||||
std::vector<uint8_t> v2 = {'{', 'i', 1, 'a', 'i', 1, 'i'};
|
||||
CHECK_THROWS_AS(json::from_ubjson(v2), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_ubjson(v2), "[json.exception.parse_error.110] parse error at byte 8: syntax error while parsing UBJSON number: unexpected end of input");
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(v2), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_ubjson(v2), "[json.exception.parse_error.110] parse error at byte 8: syntax error while parsing UBJSON number: unexpected end of input");
|
||||
CHECK(json::from_ubjson(v2, true, false).is_discarded());
|
||||
|
||||
std::vector<uint8_t> v3 = {'{', 'i', 1, 'a'};
|
||||
CHECK_THROWS_AS(json::from_ubjson(v3), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_ubjson(v3), "[json.exception.parse_error.110] parse error at byte 5: syntax error while parsing UBJSON value: unexpected end of input");
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(v3), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_ubjson(v3), "[json.exception.parse_error.110] parse error at byte 5: syntax error while parsing UBJSON value: unexpected end of input");
|
||||
CHECK(json::from_ubjson(v3, true, false).is_discarded());
|
||||
|
||||
std::vector<uint8_t> vST1 = {'{', '$', 'd', '#', 'i', 2, 'i', 1, 'a'};
|
||||
CHECK_THROWS_AS(json::from_ubjson(vST1), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_ubjson(vST1), "[json.exception.parse_error.110] parse error at byte 10: syntax error while parsing UBJSON number: unexpected end of input");
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(vST1), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_ubjson(vST1), "[json.exception.parse_error.110] parse error at byte 10: syntax error while parsing UBJSON number: unexpected end of input");
|
||||
CHECK(json::from_ubjson(vST1, true, false).is_discarded());
|
||||
|
||||
std::vector<uint8_t> vST2 = {'{', '#', 'i', 2, 'i', 1, 'a'};
|
||||
CHECK_THROWS_AS(json::from_ubjson(vST2), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::from_ubjson(vST2), "[json.exception.parse_error.110] parse error at byte 8: syntax error while parsing UBJSON value: unexpected end of input");
|
||||
CHECK_THROWS_AS(_ = json::from_ubjson(vST2), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::from_ubjson(vST2), "[json.exception.parse_error.110] parse error at byte 8: syntax error while parsing UBJSON value: unexpected end of input");
|
||||
CHECK(json::from_ubjson(vST2, true, false).is_discarded());
|
||||
}
|
||||
}
|
||||
|
@ -2113,7 +2132,8 @@ TEST_CASE("Universal Binary JSON Specification Examples 1")
|
|||
}
|
||||
}
|
||||
|
||||
TEST_CASE("all UBJSON first bytes", "[!throws]")
|
||||
#if not defined(JSON_NOEXCEPTION)
|
||||
TEST_CASE("all UBJSON first bytes")
|
||||
{
|
||||
// these bytes will fail immediately with exception parse_error.112
|
||||
std::set<uint8_t> supported =
|
||||
|
@ -2134,7 +2154,7 @@ TEST_CASE("all UBJSON first bytes", "[!throws]")
|
|||
{
|
||||
// check that parse_error.112 is only thrown if the
|
||||
// first byte is not in the supported set
|
||||
CAPTURE(e.what())
|
||||
INFO_WITH_TEMP(e.what());
|
||||
if (std::find(supported.begin(), supported.end(), byte) == supported.end())
|
||||
{
|
||||
CHECK(e.id == 112);
|
||||
|
@ -2146,8 +2166,9 @@ TEST_CASE("all UBJSON first bytes", "[!throws]")
|
|||
}
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
TEST_CASE("UBJSON roundtrips", "[hide]")
|
||||
TEST_CASE("UBJSON roundtrips" * doctest::skip())
|
||||
{
|
||||
SECTION("input from self-generated UBJSON files")
|
||||
{
|
||||
|
@ -2199,8 +2220,8 @@ TEST_CASE("UBJSON roundtrips", "[hide]")
|
|||
{
|
||||
CAPTURE(filename)
|
||||
|
||||
SECTION(filename + ": std::vector<uint8_t>")
|
||||
{
|
||||
INFO_WITH_TEMP(filename + ": std::vector<uint8_t>");
|
||||
// parse JSON file
|
||||
std::ifstream f_json(filename);
|
||||
json j1 = json::parse(f_json);
|
||||
|
@ -2217,8 +2238,8 @@ TEST_CASE("UBJSON roundtrips", "[hide]")
|
|||
CHECK(j1 == j2);
|
||||
}
|
||||
|
||||
SECTION(filename + ": std::ifstream")
|
||||
{
|
||||
INFO_WITH_TEMP(filename + ": std::ifstream");
|
||||
// parse JSON file
|
||||
std::ifstream f_json(filename);
|
||||
json j1 = json::parse(f_json);
|
||||
|
@ -2232,8 +2253,8 @@ TEST_CASE("UBJSON roundtrips", "[hide]")
|
|||
CHECK(j1 == j2);
|
||||
}
|
||||
|
||||
SECTION(filename + ": uint8_t* and size")
|
||||
{
|
||||
INFO_WITH_TEMP(filename + ": uint8_t* and size");
|
||||
// parse JSON file
|
||||
std::ifstream f_json(filename);
|
||||
json j1 = json::parse(f_json);
|
||||
|
@ -2250,8 +2271,8 @@ TEST_CASE("UBJSON roundtrips", "[hide]")
|
|||
CHECK(j1 == j2);
|
||||
}
|
||||
|
||||
SECTION(filename + ": output to output adapters")
|
||||
{
|
||||
INFO_WITH_TEMP(filename + ": output to output adapters");
|
||||
// parse JSON file
|
||||
std::ifstream f_json(filename);
|
||||
json j1 = json::parse(f_json);
|
||||
|
@ -2262,8 +2283,8 @@ TEST_CASE("UBJSON roundtrips", "[hide]")
|
|||
(std::istreambuf_iterator<char>(f_ubjson)),
|
||||
std::istreambuf_iterator<char>());
|
||||
|
||||
SECTION(filename + ": output adapters: std::vector<uint8_t>")
|
||||
{
|
||||
INFO_WITH_TEMP(filename + ": output adapters: std::vector<uint8_t>");
|
||||
std::vector<uint8_t> vec;
|
||||
json::to_ubjson(j1, vec);
|
||||
CHECK(vec == packed);
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/*
|
||||
__ _____ _____ _____
|
||||
__| | __| | | | JSON for Modern C++ (test suite)
|
||||
| | |__ | | | | | | version 3.6.1
|
||||
| | |__ | | | | | | version 3.7.0
|
||||
|_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
|
||||
Licensed under the MIT License <http://opensource.org/licenses/MIT>.
|
||||
|
@ -27,10 +27,9 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|||
SOFTWARE.
|
||||
*/
|
||||
|
||||
#include "catch.hpp"
|
||||
#include "doctest_compatibility.h"
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
|
||||
using nlohmann::json;
|
||||
|
||||
#include <array>
|
||||
|
@ -236,7 +235,7 @@ void from_json(const nlohmann::json& j, contact_book& cb)
|
|||
}
|
||||
}
|
||||
|
||||
TEST_CASE("basic usage", "[udt]")
|
||||
TEST_CASE("basic usage" * doctest::test_suite("udt"))
|
||||
{
|
||||
|
||||
// a bit narcissic maybe :) ?
|
||||
|
@ -392,7 +391,7 @@ struct adl_serializer<udt::legacy_type>
|
|||
};
|
||||
}
|
||||
|
||||
TEST_CASE("adl_serializer specialization", "[udt]")
|
||||
TEST_CASE("adl_serializer specialization" * doctest::test_suite("udt"))
|
||||
{
|
||||
SECTION("partial specialization")
|
||||
{
|
||||
|
@ -468,7 +467,7 @@ struct adl_serializer<std::vector<float>>
|
|||
};
|
||||
}
|
||||
|
||||
TEST_CASE("even supported types can be specialized", "[udt]")
|
||||
TEST_CASE("even supported types can be specialized" * doctest::test_suite("udt"))
|
||||
{
|
||||
json j = std::vector<float> {1.0, 2.0, 3.0};
|
||||
CHECK(j.dump() == R"("hijacked!")");
|
||||
|
@ -509,7 +508,7 @@ struct adl_serializer<std::unique_ptr<T>>
|
|||
};
|
||||
}
|
||||
|
||||
TEST_CASE("Non-copyable types", "[udt]")
|
||||
TEST_CASE("Non-copyable types" * doctest::test_suite("udt"))
|
||||
{
|
||||
SECTION("to_json")
|
||||
{
|
||||
|
@ -650,7 +649,7 @@ std::ostream& operator<<(std::ostream& os, small_pod l)
|
|||
}
|
||||
}
|
||||
|
||||
TEST_CASE("custom serializer for pods", "[udt]")
|
||||
TEST_CASE("custom serializer for pods" * doctest::test_suite("udt"))
|
||||
{
|
||||
using custom_json =
|
||||
nlohmann::basic_json<std::map, std::vector, std::string, bool,
|
||||
|
@ -691,7 +690,7 @@ struct another_adl_serializer
|
|||
}
|
||||
};
|
||||
|
||||
TEST_CASE("custom serializer that does adl by default", "[udt]")
|
||||
TEST_CASE("custom serializer that does adl by default" * doctest::test_suite("udt"))
|
||||
{
|
||||
using json = nlohmann::json;
|
||||
|
||||
|
@ -797,7 +796,7 @@ template <typename T>
|
|||
struct is_constructible_patched<T, decltype(void(json(std::declval<T>())))> : std::true_type {};
|
||||
}
|
||||
|
||||
TEST_CASE("an incomplete type does not trigger a compiler error in non-evaluated context", "[udt]")
|
||||
TEST_CASE("an incomplete type does not trigger a compiler error in non-evaluated context" * doctest::test_suite("udt"))
|
||||
{
|
||||
static_assert(not is_constructible_patched<json, incomplete>::value, "");
|
||||
}
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/*
|
||||
__ _____ _____ _____
|
||||
__| | __| | | | JSON for Modern C++ (test suite)
|
||||
| | |__ | | | | | | version 3.6.1
|
||||
| | |__ | | | | | | version 3.7.0
|
||||
|_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
|
||||
Licensed under the MIT License <http://opensource.org/licenses/MIT>.
|
||||
|
@ -27,15 +27,23 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|||
SOFTWARE.
|
||||
*/
|
||||
|
||||
#include "catch.hpp"
|
||||
#include "doctest_compatibility.h"
|
||||
|
||||
// for some reason including this after the json header leads to linker errors with VS 2017...
|
||||
#include <locale>
|
||||
|
||||
#define private public
|
||||
#include <nlohmann/json.hpp>
|
||||
using nlohmann::json;
|
||||
#undef private
|
||||
|
||||
#include <fstream>
|
||||
#include <sstream>
|
||||
#include <iostream>
|
||||
#include <iomanip>
|
||||
|
||||
namespace
|
||||
{
|
||||
extern size_t calls;
|
||||
size_t calls = 0;
|
||||
|
||||
|
@ -150,17 +158,19 @@ void check_utf8string(bool success_expected, int byte1, int byte2 = -1, int byte
|
|||
|
||||
CAPTURE(json_string)
|
||||
|
||||
json _;
|
||||
if (success_expected)
|
||||
{
|
||||
CHECK_NOTHROW(json::parse(json_string));
|
||||
CHECK_NOTHROW(_ = json::parse(json_string));
|
||||
}
|
||||
else
|
||||
{
|
||||
CHECK_THROWS_AS(json::parse(json_string), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::parse(json_string), json::parse_error&);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE("Unicode", "[hide]")
|
||||
TEST_CASE("Unicode" * doctest::skip())
|
||||
{
|
||||
SECTION("RFC 3629")
|
||||
{
|
||||
|
@ -1042,7 +1052,8 @@ TEST_CASE("Unicode", "[hide]")
|
|||
|
||||
json_text += "\"";
|
||||
CAPTURE(json_text)
|
||||
CHECK_NOTHROW(json::parse(json_text));
|
||||
json _;
|
||||
CHECK_NOTHROW(_ = json::parse(json_text));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1050,32 +1061,34 @@ TEST_CASE("Unicode", "[hide]")
|
|||
{
|
||||
SECTION("incorrect surrogate values")
|
||||
{
|
||||
CHECK_THROWS_AS(json::parse("\"\\uDC00\\uDC00\""), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::parse("\"\\uDC00\\uDC00\""),
|
||||
json _;
|
||||
|
||||
CHECK_THROWS_AS(_ = json::parse("\"\\uDC00\\uDC00\""), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::parse("\"\\uDC00\\uDC00\""),
|
||||
"[json.exception.parse_error.101] parse error at line 1, column 7: syntax error while parsing value - invalid string: surrogate U+DC00..U+DFFF must follow U+D800..U+DBFF; last read: '\"\\uDC00'");
|
||||
|
||||
CHECK_THROWS_AS(json::parse("\"\\uD7FF\\uDC00\""), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::parse("\"\\uD7FF\\uDC00\""),
|
||||
CHECK_THROWS_AS(_ = json::parse("\"\\uD7FF\\uDC00\""), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::parse("\"\\uD7FF\\uDC00\""),
|
||||
"[json.exception.parse_error.101] parse error at line 1, column 13: syntax error while parsing value - invalid string: surrogate U+DC00..U+DFFF must follow U+D800..U+DBFF; last read: '\"\\uD7FF\\uDC00'");
|
||||
|
||||
CHECK_THROWS_AS(json::parse("\"\\uD800]\""), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::parse("\"\\uD800]\""),
|
||||
CHECK_THROWS_AS(_ = json::parse("\"\\uD800]\""), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::parse("\"\\uD800]\""),
|
||||
"[json.exception.parse_error.101] parse error at line 1, column 8: syntax error while parsing value - invalid string: surrogate U+DC00..U+DFFF must be followed by U+DC00..U+DFFF; last read: '\"\\uD800]'");
|
||||
|
||||
CHECK_THROWS_AS(json::parse("\"\\uD800\\v\""), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::parse("\"\\uD800\\v\""),
|
||||
CHECK_THROWS_AS(_ = json::parse("\"\\uD800\\v\""), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::parse("\"\\uD800\\v\""),
|
||||
"[json.exception.parse_error.101] parse error at line 1, column 9: syntax error while parsing value - invalid string: surrogate U+DC00..U+DFFF must be followed by U+DC00..U+DFFF; last read: '\"\\uD800\\v'");
|
||||
|
||||
CHECK_THROWS_AS(json::parse("\"\\uD800\\u123\""), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::parse("\"\\uD800\\u123\""),
|
||||
CHECK_THROWS_AS(_ = json::parse("\"\\uD800\\u123\""), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::parse("\"\\uD800\\u123\""),
|
||||
"[json.exception.parse_error.101] parse error at line 1, column 13: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; last read: '\"\\uD800\\u123\"'");
|
||||
|
||||
CHECK_THROWS_AS(json::parse("\"\\uD800\\uDBFF\""), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::parse("\"\\uD800\\uDBFF\""),
|
||||
CHECK_THROWS_AS(_ = json::parse("\"\\uD800\\uDBFF\""), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::parse("\"\\uD800\\uDBFF\""),
|
||||
"[json.exception.parse_error.101] parse error at line 1, column 13: syntax error while parsing value - invalid string: surrogate U+DC00..U+DFFF must be followed by U+DC00..U+DFFF; last read: '\"\\uD800\\uDBFF'");
|
||||
|
||||
CHECK_THROWS_AS(json::parse("\"\\uD800\\uE000\""), json::parse_error&);
|
||||
CHECK_THROWS_WITH(json::parse("\"\\uD800\\uE000\""),
|
||||
CHECK_THROWS_AS(_ = json::parse("\"\\uD800\\uE000\""), json::parse_error&);
|
||||
CHECK_THROWS_WITH(_ = json::parse("\"\\uD800\\uE000\""),
|
||||
"[json.exception.parse_error.101] parse error at line 1, column 13: syntax error while parsing value - invalid string: surrogate U+DC00..U+DFFF must be followed by U+DC00..U+DFFF; last read: '\"\\uD800\\uE000'");
|
||||
}
|
||||
}
|
||||
|
@ -1194,16 +1207,20 @@ TEST_CASE("Unicode", "[hide]")
|
|||
|
||||
SECTION("error for incomplete/wrong BOM")
|
||||
{
|
||||
CHECK_THROWS_AS(json::parse("\xef\xbb"), json::parse_error&);
|
||||
CHECK_THROWS_AS(json::parse("\xef\xbb\xbb"), json::parse_error&);
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::parse("\xef\xbb"), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::parse("\xef\xbb\xbb"), json::parse_error&);
|
||||
}
|
||||
}
|
||||
|
||||
namespace
|
||||
{
|
||||
void roundtrip(bool success_expected, const std::string& s);
|
||||
|
||||
void roundtrip(bool success_expected, const std::string& s)
|
||||
{
|
||||
CAPTURE(s)
|
||||
json _;
|
||||
|
||||
// create JSON string value
|
||||
json j = s;
|
||||
|
@ -1219,11 +1236,11 @@ void roundtrip(bool success_expected, const std::string& s)
|
|||
if (s[0] != '\0')
|
||||
{
|
||||
// parsing JSON text succeeds
|
||||
CHECK_NOTHROW(json::parse(ps));
|
||||
CHECK_NOTHROW(_ = json::parse(ps));
|
||||
}
|
||||
|
||||
// roundtrip succeeds
|
||||
CHECK_NOTHROW(json::parse(j.dump()));
|
||||
CHECK_NOTHROW(_ = json::parse(j.dump()));
|
||||
|
||||
// after roundtrip, the same string is stored
|
||||
json jr = json::parse(j.dump());
|
||||
|
@ -1235,9 +1252,10 @@ void roundtrip(bool success_expected, const std::string& s)
|
|||
CHECK_THROWS_AS(j.dump(), json::type_error&);
|
||||
|
||||
// parsing JSON text fails
|
||||
CHECK_THROWS_AS(json::parse(ps), json::parse_error&);
|
||||
CHECK_THROWS_AS(_ = json::parse(ps), json::parse_error&);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE("Markus Kuhn's UTF-8 decoder capability and stress test")
|
||||
{
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/*
|
||||
__ _____ _____ _____
|
||||
__| | __| | | | JSON for Modern C++ (test suite)
|
||||
| | |__ | | | | | | version 3.6.1
|
||||
| | |__ | | | | | | version 3.7.0
|
||||
|_____|_____|_____|_|___| https://github.com/nlohmann/json
|
||||
|
||||
Licensed under the MIT License <http://opensource.org/licenses/MIT>.
|
||||
|
@ -27,12 +27,13 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|||
SOFTWARE.
|
||||
*/
|
||||
|
||||
#include "catch.hpp"
|
||||
#include "doctest_compatibility.h"
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
|
||||
using nlohmann::json;
|
||||
|
||||
namespace
|
||||
{
|
||||
bool wstring_is_utf16();
|
||||
bool wstring_is_utf16()
|
||||
{
|
||||
|
@ -50,6 +51,7 @@ bool u32string_is_utf32()
|
|||
{
|
||||
return (std::u32string(U"💩") == std::u32string(U"\U0001F4A9"));
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE("wide strings")
|
||||
{
|
||||
|
@ -68,7 +70,8 @@ TEST_CASE("wide strings")
|
|||
if (wstring_is_utf16())
|
||||
{
|
||||
std::wstring w = L"\"\xDBFF";
|
||||
CHECK_THROWS_AS(json::parse(w), json::parse_error&);
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::parse(w), json::parse_error&);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -87,7 +90,8 @@ TEST_CASE("wide strings")
|
|||
if (wstring_is_utf16())
|
||||
{
|
||||
std::u16string w = u"\"\xDBFF";
|
||||
CHECK_THROWS_AS(json::parse(w), json::parse_error&);
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::parse(w), json::parse_error&);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -106,7 +110,8 @@ TEST_CASE("wide strings")
|
|||
if (u32string_is_utf32())
|
||||
{
|
||||
std::u32string w = U"\"\x110000";
|
||||
CHECK_THROWS_AS(json::parse(w), json::parse_error&);
|
||||
json _;
|
||||
CHECK_THROWS_AS(_ = json::parse(w), json::parse_error&);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,7 +1,7 @@
/*
__ _____ _____ _____
__| | __| | | | JSON for Modern C++ (test suite)
| | |__ | | | | | | version 3.6.1
| | |__ | | | | | | version 3.7.0
|_____|_____|_____|_|___| https://github.com/nlohmann/json

Licensed under the MIT License <http://opensource.org/licenses/MIT>.

@ -27,5 +27,5 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/

#define CATCH_CONFIG_MAIN
#include "catch.hpp"
#define DOCTEST_CONFIG_IMPLEMENT_WITH_MAIN
#include "doctest_compatibility.h"
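The hunk above swaps the Catch main-generation macro for the doctest equivalent. A minimal sketch of what that single translation unit looks like after the switch (assuming `doctest_compatibility.h` is on the include path; the trivial test case is illustrative only):

```cpp
// unit.cpp-style entry point: exactly one translation unit defines
// DOCTEST_CONFIG_IMPLEMENT_WITH_MAIN so that doctest generates main();
// the other test files in this diff only include the compatibility header.
#define DOCTEST_CONFIG_IMPLEMENT_WITH_MAIN
#include "doctest_compatibility.h"

// A trivial smoke test so the resulting binary exercises the test runner.
TEST_CASE("smoke test (illustrative)")
{
    CHECK(1 + 1 == 2);
}
```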
23 test/thirdparty/catch/LICENSE_1_0.txt vendored
@ -1,23 +0,0 @@
|
|||
Boost Software License - Version 1.0 - August 17th, 2003
|
||||
|
||||
Permission is hereby granted, free of charge, to any person or organization
|
||||
obtaining a copy of the software and accompanying documentation covered by
|
||||
this license (the "Software") to use, reproduce, display, distribute,
|
||||
execute, and transmit the Software, and to prepare derivative works of the
|
||||
Software, and to permit third-parties to whom the Software is furnished to
|
||||
do so, all subject to the following:
|
||||
|
||||
The copyright notices in the Software and this entire statement, including
|
||||
the above license grant, this restriction and the following disclaimer,
|
||||
must be included in all copies of the Software, in whole or in part, and
|
||||
all derivative works of the Software, unless such copies or derivative
|
||||
works are solely in the form of machine-executable object code generated by
|
||||
a source language processor.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
|
||||
SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
|
||||
FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
|
||||
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||
DEALINGS IN THE SOFTWARE.
|
11689 test/thirdparty/catch/catch.hpp vendored
File diff suppressed because it is too large
21 test/thirdparty/doctest/LICENSE.txt vendored Normal file
@ -0,0 +1,21 @@
|
|||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2016-2018 Viktor Kirilov
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
6025 test/thirdparty/doctest/doctest.h vendored Normal file
File diff suppressed because it is too large
37 test/thirdparty/doctest/doctest_compatibility.h vendored Normal file
@ -0,0 +1,37 @@
#ifndef DOCTEST_COMPATIBILITY
#define DOCTEST_COMPATIBILITY

#define DOCTEST_CONFIG_VOID_CAST_EXPRESSIONS
#define DOCTEST_THREAD_LOCAL // enable single-threaded builds on XCode 6/7 - https://github.com/onqtam/doctest/issues/172
#include "doctest.h"

// Catch doesn't require a semicolon after CAPTURE but doctest does
#undef CAPTURE
#define CAPTURE(x) DOCTEST_CAPTURE(x);

// Sections from Catch are called Subcases in doctest and don't work with std::string by default
#undef SUBCASE
#define SECTION(x) DOCTEST_SUBCASE(x)

// convenience macro around INFO since it doesn't support temporaries (it is optimized to avoid allocations for runtime speed)
#define INFO_WITH_TEMP_IMPL(x, var_name) const auto var_name = x; INFO(var_name) // lvalue!
#define INFO_WITH_TEMP(x) INFO_WITH_TEMP_IMPL(x, DOCTEST_ANONYMOUS(DOCTEST_STD_STRING_))

// doctest doesn't support THROWS_WITH for std::string out of the box (has to include <string>...)
#define CHECK_THROWS_WITH_STD_STR_IMPL(expr, str, var_name) \
    do { \
        std::string var_name = str; \
        CHECK_THROWS_WITH(expr, var_name.c_str()); \
    } while (false)
#define CHECK_THROWS_WITH_STD_STR(expr, str) \
    CHECK_THROWS_WITH_STD_STR_IMPL(expr, str, DOCTEST_ANONYMOUS(DOCTEST_STD_STRING_))

// included here because for some tests in the json repository private is defined as
// public and if no STL header is included before that then in the json include when STL
// stuff is included the MSVC STL complains (errors) that C++ keywords are being redefined
#include <iosfwd>

// Catch does this by default
using doctest::Approx;

#endif
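The header above maps the Catch-style macros used throughout the suite onto doctest. A short usage sketch (illustrative only; the test name, the `boom` lambda, and the literal strings are invented for this example, and it assumes the header and `<nlohmann/json.hpp>` are on the include path):

```cpp
#include "doctest_compatibility.h"

#include <nlohmann/json.hpp>
using nlohmann::json;

#include <stdexcept>
#include <string>

TEST_CASE("compatibility shim (illustrative)")
{
    SECTION("SECTION expands to a doctest subcase")
    {
        const std::string filename = "test/data/json.org/1.json"; // path as used by the suite above
        CAPTURE(filename)                        // no trailing ';' needed; the macro adds one
        INFO_WITH_TEMP(filename + ": example");  // temporaries are bound to an lvalue first

        json _;
        CHECK_THROWS_AS(_ = json::parse("{"), json::parse_error&);

        // CHECK_THROWS_WITH_STD_STR compares the exception's what() against a std::string
        const auto boom = [] { throw std::runtime_error("boom"); };
        CHECK_THROWS_WITH_STD_STR(boom(), std::string("boom"));
    }
}
```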
21 test/thirdparty/fastcov/LICENSE vendored Normal file
@ -0,0 +1,21 @@
|
|||
The MIT License
|
||||
|
||||
Copyright (c) 2018-2019 Bryan Gillespie
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
46 test/thirdparty/fastcov/README.md vendored Normal file
@ -0,0 +1,46 @@
# fastcov
A massively parallel gcov wrapper for generating intermediate coverage formats *fast*

The goal of fastcov is to generate code coverage intermediate formats *as fast as possible* (ideally < 1 second), even for large projects with hundreds of gcda objects. The intermediate formats may then be consumed by a report generator such as lcov's genhtml, or a dedicated front end such as coveralls. fastcov was originally designed to be a drop-in replacement for lcov (application coverage only, not kernel coverage).

Currently the only intermediate formats supported are gcov json format and lcov info format. Adding support for other formats should require just a few lines of python to transform gcov json format to the desired shape.

In order to achieve the massive speed gains, a few constraints apply:

1. GCC version >= 9.0.0

These versions of GCOV have support for JSON intermediate format as well as streaming report data straight to stdout

2. Object files must be built either:

- Using absolute paths for all `-I` flags passed to the compiler
- Invoking the compiler from the same root directory

If you use CMake, you are almost certainly satisfying the second constraint (unless you care about `ExternalProject` coverage).

## Sample Usage:
```bash
$ cd build_dir
$ fastcov.py --zerocounters
$ <run unit tests>
$ fastcov.py --exclude /usr/include --lcov -o report.info
$ genhtml -o code_coverage report.info
```

## Legacy fastcov

It is possible to reap most of the benefits of fastcov for GCC version < 9.0.0 and >= 7.1.0. However, there will be a *potential* header file loss of correctness.

`fastcov_legacy.py` can be used with pre GCC-9 down to GCC 7.1.0 but with a few penalties due to gcov limitations. This is because running gcov in parallel generates .gcov header reports in parallel which overwrite each other. This isn't a problem unless your header files have actual logic (i.e. header only library) that you want to measure coverage for. Use the `-F` flag to specify which gcda files should not be run in parallel in order to capture accurate header file data just for those. I don't plan on supporting `fastcov_legacy.py` aside from basic bug fixes.

## Benchmarks

Anecdotal testing on my own projects indicates that fastcov is over 100x faster than lcov and over 30x faster than gcovr:

Project Size: ~250 .gcda, ~500 .gcov generated by gcov

Time to process all gcda and parse all gcov:

- fastcov: ~700ms
- lcov: ~90s
- gcovr: ~30s
372 test/thirdparty/fastcov/fastcov.py vendored Executable file
@ -0,0 +1,372 @@
|
|||
#!/usr/bin/env python3
|
||||
"""
|
||||
Author: Bryan Gillespie
|
||||
|
||||
A massively parallel gcov wrapper for generating intermediate coverage formats fast
|
||||
|
||||
The goal of fastcov is to generate code coverage intermediate formats as fast as possible
|
||||
(ideally < 1 second), even for large projects with hundreds of gcda objects. The intermediate
|
||||
formats may then be consumed by a report generator such as lcov's genhtml, or a dedicated front
|
||||
end such as coveralls.
|
||||
|
||||
Sample Usage:
|
||||
$ cd build_dir
|
||||
$ ./fastcov.py --zerocounters
|
||||
$ <run unit tests>
|
||||
$ ./fastcov.py --exclude /usr/include test/ --lcov -o report.info
|
||||
$ genhtml -o code_coverage report.info
|
||||
"""
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import glob
|
||||
import json
|
||||
import time
|
||||
import argparse
|
||||
import threading
|
||||
import subprocess
|
||||
import multiprocessing
|
||||
|
||||
MINIMUM_GCOV = (9,0,0)
|
||||
MINIMUM_CHUNK_SIZE = 5
|
||||
|
||||
# Interesting metrics
|
||||
START_TIME = time.time()
|
||||
GCOVS_TOTAL = []
|
||||
GCOVS_SKIPPED = []
|
||||
|
||||
def chunks(l, n):
|
||||
"""Yield successive n-sized chunks from l."""
|
||||
for i in range(0, len(l), n):
|
||||
yield l[i:i + n]
|
||||
|
||||
def stopwatch():
|
||||
"""Return number of seconds since last time this was called"""
|
||||
global START_TIME
|
||||
end_time = time.time()
|
||||
delta = end_time - START_TIME
|
||||
START_TIME = end_time
|
||||
return delta
|
||||
|
||||
def parseVersionFromLine(version_str):
|
||||
"""Given a string containing a dotted integer version, parse out integers and return as tuple"""
|
||||
version = re.search(r'(\d+\.\d+\.\d+)[^\.]', version_str)
|
||||
|
||||
if not version:
|
||||
return (0,0,0)
|
||||
|
||||
return tuple(map(int, version.group(1).split(".")))
|
||||
|
||||
def getGcovVersion(gcov):
|
||||
p = subprocess.Popen([gcov, "-v"], stdout=subprocess.PIPE)
|
||||
output = p.communicate()[0].decode('UTF-8')
|
||||
p.wait()
|
||||
return parseVersionFromLine(output.split("\n")[0])
|
||||
|
||||
def removeFiles(files):
|
||||
for file in files:
|
||||
os.remove(file)
|
||||
|
||||
def getFilteredGcdaFiles(gcda_files, exclude):
|
||||
def excludeGcda(gcda):
|
||||
for ex in exclude:
|
||||
if ex in gcda:
|
||||
return False
|
||||
return True
|
||||
return list(filter(excludeGcda, gcda_files))
|
||||
|
||||
def getGcdaFiles(cwd, gcda_files):
|
||||
if not gcda_files:
|
||||
gcda_files = glob.glob(os.path.join(os.path.abspath(cwd), "**/*.gcda"), recursive=True)
|
||||
return gcda_files
|
||||
|
||||
def gcovWorker(cwd, gcov, files, chunk, gcov_filter_options, branch_coverage):
|
||||
gcov_args = "-it"
|
||||
if branch_coverage:
|
||||
gcov_args += "b"
|
||||
|
||||
p = subprocess.Popen([gcov, gcov_args] + chunk, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
|
||||
for line in iter(p.stdout.readline, b''):
|
||||
intermediate_json = json.loads(line.decode(sys.stdout.encoding))
|
||||
intermediate_json_files = processGcovs(cwd, intermediate_json["files"], gcov_filter_options)
|
||||
for f in intermediate_json_files:
|
||||
files.append(f) #thread safe, there might be a better way to do this though
|
||||
GCOVS_TOTAL.append(len(intermediate_json["files"]))
|
||||
GCOVS_SKIPPED.append(len(intermediate_json["files"])-len(intermediate_json_files))
|
||||
p.wait()
|
||||
|
||||
def processGcdas(cwd, gcov, jobs, gcda_files, gcov_filter_options, branch_coverage):
|
||||
chunk_size = max(MINIMUM_CHUNK_SIZE, int(len(gcda_files) / jobs) + 1)
|
||||
|
||||
threads = []
|
||||
intermediate_json_files = []
|
||||
for chunk in chunks(gcda_files, chunk_size):
|
||||
t = threading.Thread(target=gcovWorker, args=(cwd, gcov, intermediate_json_files, chunk, gcov_filter_options, branch_coverage))
|
||||
threads.append(t)
|
||||
t.start()
|
||||
|
||||
log("Spawned %d gcov threads, each processing at most %d gcda files" % (len(threads), chunk_size))
|
||||
for t in threads:
|
||||
t.join()
|
||||
|
||||
return intermediate_json_files
|
||||
|
||||
def processGcov(cwd, gcov, files, gcov_filter_options):
    # Add absolute path
    gcov["file_abs"] = os.path.abspath(os.path.join(cwd, gcov["file"]))

    # If explicit sources were passed, check for match
    if gcov_filter_options["sources"]:
        if gcov["file_abs"] in gcov_filter_options["sources"]:
            files.append(gcov)
        return

    # Check include filter
    if gcov_filter_options["include"]:
        for ex in gcov_filter_options["include"]:
            if ex in gcov["file"]:
                files.append(gcov)
                break
        return

    # Check exclude filter
    for ex in gcov_filter_options["exclude"]:
        if ex in gcov["file"]:
            return

    files.append(gcov)

def processGcovs(cwd, gcov_files, gcov_filter_options):
    files = []
    for gcov in gcov_files:
        processGcov(cwd, gcov, files, gcov_filter_options)
    return files
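# Summary of the filter precedence implemented by processGcov above (a
# restatement, not new behaviour): an explicit --source-files list wins and is
# matched by absolute path; otherwise --include substrings must match; otherwise
# anything matching an --exclude substring is dropped and everything else kept.
# For instance, options like {"sources": set(), "include": [], "exclude": ["/usr/", "test/"]}
# would drop any gcov entry whose "file" field contains "/usr/" or "test/".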
def dumpBranchCoverageToLcovInfo(f, branches):
    branch_miss = 0
    for line_num, branch_counts in branches.items():
        for i, count in enumerate(branch_counts):
            #Branch (<line number>, <block number>, <branch number>, <taken>)
            f.write("BRDA:%s,%d,%d,%d\n" % (line_num, int(i/2), i, count))
            branch_miss += int(count == 0)
    f.write("BRF:%d\n" % len(branches)) #Branches Found
    f.write("BRH:%d\n" % (len(branches) - branch_miss)) #Branches Hit

def dumpToLcovInfo(fastcov_json, output):
    with open(output, "w") as f:
        for sf, data in fastcov_json["sources"].items():
            f.write("SF:%s\n" % sf) #Source File

            fn_miss = 0
            for function, fdata in data["functions"].items():
                f.write("FN:%d,%s\n" % (fdata["start_line"], function)) #Function Start Line
                f.write("FNDA:%d,%s\n" % (fdata["execution_count"], function)) #Function Hits
                fn_miss += int(fdata["execution_count"] == 0)
            f.write("FNF:%d\n" % len(data["functions"])) #Functions Found
            f.write("FNH:%d\n" % (len(data["functions"]) - fn_miss)) #Functions Hit

            if data["branches"]:
                dumpBranchCoverageToLcovInfo(f, data["branches"])

            line_miss = 0
            for line_num, count in data["lines"].items():
                f.write("DA:%s,%d\n" % (line_num, count)) #Line
                line_miss += int(count == 0)
            f.write("LF:%d\n" % len(data["lines"])) #Lines Found
            f.write("LH:%d\n" % (len(data["lines"]) - line_miss)) #Lines Hit
            f.write("end_of_record\n")
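# Illustrative sketch of one lcov "record" as written by dumpToLcovInfo above;
# the file name, function name and counts are made up:
#   SF:/path/to/src/widget.cpp
#   FN:12,widget_init
#   FNDA:3,widget_init
#   FNF:1
#   FNH:1
#   BRDA:15,0,0,3
#   BRDA:15,0,1,0
#   BRF:1
#   BRH:0
#   DA:12,3
#   DA:15,3
#   LF:2
#   LH:2
#   end_of_record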
def exclMarkerWorker(fastcov_sources, chunk):
    for source in chunk:
        # If there are no covered lines, skip
        if not fastcov_sources[source]["lines"]:
            continue

        start_line = 0
        end_line = 0
        with open(source) as f:
            for i, line in enumerate(f, 1): #Start enumeration at line 1
                if not "LCOV_EXCL" in line:
                    continue

                if "LCOV_EXCL_LINE" in line:
                    if str(i) in fastcov_sources[source]["lines"]:
                        del fastcov_sources[source]["lines"][str(i)]
                    if str(i) in fastcov_sources[source]["branches"]:
                        del fastcov_sources[source]["branches"][str(i)]
                elif "LCOV_EXCL_START" in line:
                    start_line = i
                elif "LCOV_EXCL_STOP" in line:
                    end_line = i

                    if not start_line:
                        end_line = 0
                        continue

                    for key in ["lines", "branches"]:
                        for line_num in list(fastcov_sources[source][key].keys()):
                            if int(line_num) <= end_line and int(line_num) >= start_line:
                                del fastcov_sources[source][key][line_num]

                    start_line = end_line = 0
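# Example of the exclusion markers exclMarkerWorker looks for, as they might
# appear in a covered C++ source file (the snippet is illustrative only):
#   void rarely_hit() {
#       assert(false);      // LCOV_EXCL_LINE  -> this one line is dropped
#       // LCOV_EXCL_START     everything from here ...
#       abort_handler();
#       // LCOV_EXCL_STOP      ... to here is dropped from lines and branches
#   }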
def scanExclusionMarkers(fastcov_json, jobs):
    chunk_size = max(MINIMUM_CHUNK_SIZE, int(len(fastcov_json["sources"]) / jobs) + 1)

    threads = []
    for chunk in chunks(list(fastcov_json["sources"].keys()), chunk_size):
        t = threading.Thread(target=exclMarkerWorker, args=(fastcov_json["sources"], chunk))
        threads.append(t)
        t.start()

    log("Spawned %d threads each scanning at most %d source files" % (len(threads), chunk_size))
    for t in threads:
        t.join()

def distillFunction(function_raw, functions):
    function_name = function_raw["name"]
    if function_name not in functions:
        functions[function_name] = {
            "start_line": function_raw["start_line"],
            "execution_count": function_raw["execution_count"]
        }
    else:
        functions[function_name]["execution_count"] += function_raw["execution_count"]

def distillLine(line_raw, lines, branches):
    line_number = str(line_raw["line_number"])
    if line_number not in lines:
        lines[line_number] = line_raw["count"]
    else:
        lines[line_number] += line_raw["count"]

    for i, branch in enumerate(line_raw["branches"]):
        if line_number not in branches:
            branches[line_number] = []
        blen = len(branches[line_number])
        glen = len(line_raw["branches"])
        if blen < glen:
            branches[line_number] += [0] * (glen - blen)
        branches[line_number][i] += branch["count"]

def distillSource(source_raw, sources):
    source_name = source_raw["file_abs"]
    if source_name not in sources:
        sources[source_name] = {
            "functions": {},
            "branches": {},
            "lines": {},
        }

    for function in source_raw["functions"]:
        distillFunction(function, sources[source_name]["functions"])

    for line in source_raw["lines"]:
        distillLine(line, sources[source_name]["lines"], sources[source_name]["branches"])

def distillReport(report_raw):
    report_json = {
        "sources": {}
    }

    for source in report_raw:
        distillSource(source, report_json["sources"])

    return report_json
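# Rough shape of the "fastcov JSON" report that distillReport builds; the path,
# function name and counts below are invented purely for illustration:
#   {
#     "sources": {
#       "/abs/path/to/widget.cpp": {
#         "functions": {"widget_init": {"start_line": 12, "execution_count": 3}},
#         "branches":  {"15": [3, 0]},
#         "lines":     {"12": 3, "15": 3}
#       }
#     }
#   }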
def dumpToJson(intermediate, output):
    with open(output, "w") as f:
        json.dump(intermediate, f)

def log(line):
    if not args.quiet:
        print("[{:.3f}s] {}".format(stopwatch(), line))

def getGcovFilterOptions(args):
    return {
        "sources": set([os.path.abspath(s) for s in args.sources]), #Make paths absolute, use set for fast lookups
        "include": args.includepost,
        "exclude": args.excludepost,
    }

def main(args):
    # Need at least gcov 9.0.0 because that's when gcov JSON and stdout streaming was introduced
    current_gcov_version = getGcovVersion(args.gcov)
    if current_gcov_version < MINIMUM_GCOV:
        sys.stderr.write("Minimum gcov version {} required, found {}\n".format(".".join(map(str, MINIMUM_GCOV)), ".".join(map(str, current_gcov_version))))
        exit(1)

    # Get list of gcda files to process
    gcda_files = getGcdaFiles(args.directory, args.gcda_files)
    log("Found {} .gcda files ".format(len(gcda_files)))

    # If gcda filtering is enabled, filter them out now
    if args.excludepre:
        gcda_files = getFilteredGcdaFiles(gcda_files, args.excludepre)
        log("{} .gcda files after filtering".format(len(gcda_files)))

    # We "zero" the "counters" by simply deleting all gcda files
    if args.zerocounters:
        removeFiles(gcda_files)
        log("{} .gcda files removed".format(len(gcda_files)))
        return

    # Fire up one gcov per cpu and start processing gcdas
    gcov_filter_options = getGcovFilterOptions(args)
    intermediate_json_files = processGcdas(args.cdirectory, args.gcov, args.jobs, gcda_files, gcov_filter_options, args.branchcoverage)

    # Summarize processing results
    gcov_total = sum(GCOVS_TOTAL)
    gcov_skipped = sum(GCOVS_SKIPPED)
    log("Processed {} .gcov files ({} total, {} skipped)".format(gcov_total - gcov_skipped, gcov_total, gcov_skipped))

    # Distill all the extraneous info gcov gives us down to the core report
    fastcov_json = distillReport(intermediate_json_files)
    log("Aggregated raw gcov JSON into fastcov JSON report")

    # Dump to desired file format
    if args.lcov:
        scanExclusionMarkers(fastcov_json, args.jobs)
        log("Scanned {} source files for exclusion markers".format(len(fastcov_json["sources"])))
        dumpToLcovInfo(fastcov_json, args.output)
        log("Created lcov info file '{}'".format(args.output))
    elif args.gcov_raw:
        dumpToJson(intermediate_json_files, args.output)
        log("Created gcov raw json file '{}'".format(args.output))
    else:
        dumpToJson(fastcov_json, args.output)
        log("Created fastcov json file '{}'".format(args.output))

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='A parallel gcov wrapper for fast coverage report generation')
    parser.add_argument('-z', '--zerocounters', dest='zerocounters', action="store_true", help='Recursively delete all gcda files')

    # Enable Branch Coverage
    parser.add_argument('-b', '--branch-coverage', dest='branchcoverage', action="store_true", help='Include branch counts in the coverage report')

    # Filtering Options
    parser.add_argument('-s', '--source-files', dest='sources', nargs="+", metavar='', default=[], help='Filter: Specify exactly which source files should be included in the final report. Paths must be either absolute or relative to current directory.')
    parser.add_argument('-e', '--exclude', dest='excludepost', nargs="+", metavar='', default=[], help='Filter: Exclude source files from final report if they contain one of the provided substrings (i.e. /usr/include test/, etc.)')
    parser.add_argument('-i', '--include', dest='includepost', nargs="+", metavar='', default=[], help='Filter: Only include source files in final report that contain one of the provided substrings (i.e. src/ etc.)')
    parser.add_argument('-f', '--gcda-files', dest='gcda_files', nargs="+", metavar='', default=[], help='Filter: Specify exactly which gcda files should be processed instead of recursively searching the search directory.')
    parser.add_argument('-E', '--exclude-gcda', dest='excludepre', nargs="+", metavar='', default=[], help='Filter: Exclude gcda files from being processed via simple find matching (not regex)')

    parser.add_argument('-g', '--gcov', dest='gcov', default='gcov', help='Which gcov binary to use')

    parser.add_argument('-d', '--search-directory', dest='directory', default=".", help='Base directory to recursively search for gcda files (default: .)')
    parser.add_argument('-c', '--compiler-directory', dest='cdirectory', default=".", help='Base directory compiler was invoked from (default: .) \
                            This needs to be set if invoking fastcov from somewhere other than the base compiler directory.')

    parser.add_argument('-j', '--jobs', dest='jobs', type=int, default=multiprocessing.cpu_count(), help='Number of parallel gcov to spawn (default: %d).' % multiprocessing.cpu_count())
    parser.add_argument('-m', '--minimum-chunk-size', dest='minimum_chunk', type=int, default=5, help='Minimum number of files a thread should process (default: 5). \
                            If you have only 4 gcda files but they are monstrously huge, you could change this value to a 1 so that each thread will only process 1 gcda. Otherwise fastcov will spawn only 1 thread to process all of them.')

    parser.add_argument('-l', '--lcov', dest='lcov', action="store_true", help='Output in lcov info format instead of fastcov json')
    parser.add_argument('-r', '--gcov-raw', dest='gcov_raw', action="store_true", help='Output in gcov raw json instead of fastcov json')
    parser.add_argument('-o', '--output', dest='output', default="coverage.json", help='Name of output file (default: coverage.json)')
    parser.add_argument('-q', '--quiet', dest='quiet', action="store_true", help='Suppress output to stdout')

    args = parser.parse_args()
    main(args)
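# A couple of illustrative invocations based on the options defined above (the
# paths and filter strings are placeholders, not taken from this repository's
# build setup):
#
#   python3 fastcov.py --zerocounters
#       delete all .gcda files under the search directory before a fresh run
#
#   python3 fastcov.py --branch-coverage --exclude /usr/include test/ \
#                      --lcov -o coverage.info
#       run gcov in parallel, drop system headers and test sources, and write an
#       lcov-style coverage.info that genhtml can turn into an HTML report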
0
third_party/cpplint/LICENSE
vendored
Executable file → Normal file