moved from Catch to doctest for unit tests
parent e5753b14a8
commit 2f44ac1def
52 changed files with 5517 additions and 11854 deletions
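For orientation (this note and the snippet are not part of the commit): the hunks below come from one of the converted test files. Where Catch expressed special behaviour through tags such as "[hide]" and "[!throws]", the tests now include a doctest_compatibility.h shim and use doctest's own mechanisms. Below is a minimal standalone doctest example using only idioms that are visible in the diff; the test name reuses one from the file, while the filename variable and the placeholder assertion are made up for illustration.

#define DOCTEST_CONFIG_IMPLEMENT_WITH_MAIN
#include "doctest.h"

#include <string>

// doctest::skip() replaces Catch's "[hide]" tag: the case is registered but
// not run by default (doctest's --no-skip flag runs skipped cases anyway)
TEST_CASE("CBOR roundtrips" * doctest::skip())
{
    const std::string filename = "test/data/json.org/1.json";  // illustrative input path
    CAPTURE(filename);

    // SUBCASE is doctest's native equivalent of Catch's SECTION; the compatibility
    // header presumably maps one name onto the other
    SUBCASE("std::vector<uint8_t>")
    {
        CHECK(1 + 1 == 2);  // placeholder assertion
    }
}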
@@ -27,12 +27,15 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 SOFTWARE.
 */
 
-#include "catch.hpp"
+#include "doctest_compatibility.h"
 
 #include <nlohmann/json.hpp>
 using nlohmann::json;
 
 #include <fstream>
+#include <sstream>
+#include <iomanip>
 #include <set>
 
 class SaxCountdown
 {
@@ -1586,7 +1589,8 @@ TEST_CASE("single CBOR roundtrip")
     }
 }
 
-TEST_CASE("CBOR regressions", "[!throws]")
+#if not defined(JSON_NOEXCEPTION)
+TEST_CASE("CBOR regressions")
 {
     SECTION("fuzz test results")
     {
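Catch's "[!throws]" tag marked a test case that may throw even when it passes, so such cases could be filtered out of exception-free runs. The doctest version drops the tag and instead removes the whole test case from the build when exceptions are unavailable. Below is a minimal sketch of the same guard pattern, assuming (as the hunk above suggests) that JSON_NOEXCEPTION is the macro defined for builds without exceptions; the test name and assertion are invented for illustration.

#define DOCTEST_CONFIG_IMPLEMENT_WITH_MAIN
#include "doctest.h"

#include <stdexcept>

#if not defined(JSON_NOEXCEPTION)
TEST_CASE("guarded regression test")
{
    // exception-based assertions are only compiled when exceptions are enabled
    CHECK_THROWS_AS(throw std::invalid_argument("demo"), std::invalid_argument);
}
#endif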
@@ -1655,12 +1659,13 @@ TEST_CASE("CBOR regressions", "[!throws]")
         }
     }
 }
+#endif
 
-TEST_CASE("CBOR roundtrips", "[hide]")
+TEST_CASE("CBOR roundtrips" * doctest::skip())
 {
     SECTION("input from flynn")
     {
-        // most of these are exluded due to differences in key order (not a real problem)
+        // most of these are excluded due to differences in key order (not a real problem)
         auto exclude_packed = std::set<std::string>
         {
             "test/data/json.org/1.json",
@@ -1827,8 +1832,8 @@ TEST_CASE("CBOR roundtrips", "[hide]")
     {
         CAPTURE(filename)
 
         SECTION(filename + ": std::vector<uint8_t>")
         {
+            INFO_WITH_TEMP(filename + ": std::vector<uint8_t>");
             // parse JSON file
             std::ifstream f_json(filename);
             json j1 = json::parse(f_json);
@@ -1845,8 +1850,8 @@ TEST_CASE("CBOR roundtrips", "[hide]")
             CHECK(j1 == j2);
         }
 
         SECTION(filename + ": std::ifstream")
         {
+            INFO_WITH_TEMP(filename + ": std::ifstream");
             // parse JSON file
             std::ifstream f_json(filename);
             json j1 = json::parse(f_json);
@@ -1860,8 +1865,8 @@ TEST_CASE("CBOR roundtrips", "[hide]")
             CHECK(j1 == j2);
         }
 
         SECTION(filename + ": uint8_t* and size")
         {
+            INFO_WITH_TEMP(filename + ": uint8_t* and size");
             // parse JSON file
             std::ifstream f_json(filename);
             json j1 = json::parse(f_json);
@@ -1878,8 +1883,8 @@ TEST_CASE("CBOR roundtrips", "[hide]")
             CHECK(j1 == j2);
         }
 
         SECTION(filename + ": output to output adapters")
         {
+            INFO_WITH_TEMP(filename + ": output to output adapters");
             // parse JSON file
             std::ifstream f_json(filename);
             json j1 = json::parse(f_json);
@@ -1892,8 +1897,8 @@ TEST_CASE("CBOR roundtrips", "[hide]")
 
             if (!exclude_packed.count(filename))
             {
                 SECTION(filename + ": output adapters: std::vector<uint8_t>")
                 {
+                    INFO_WITH_TEMP(filename + ": output adapters: std::vector<uint8_t>");
                     std::vector<uint8_t> vec;
                     json::to_cbor(j1, vec);
                     CHECK(vec == packed);
@@ -1904,7 +1909,8 @@ TEST_CASE("CBOR roundtrips", "[hide]")
     }
 }
 
-TEST_CASE("all CBOR first bytes", "[!throws]")
+#if not defined(JSON_NOEXCEPTION)
+TEST_CASE("all CBOR first bytes")
 {
     // these bytes will fail immediately with exception parse_error.112
     std::set<uint8_t> unsupported =
@@ -1968,7 +1974,7 @@ TEST_CASE("all CBOR first bytes", "[!throws]")
         {
             // check that parse_error.112 is only thrown if the
             // first byte is in the unsupported set
-            CAPTURE(e.what())
+            INFO_WITH_TEMP(e.what());
             if (std::find(unsupported.begin(), unsupported.end(), byte) != unsupported.end())
             {
                 CHECK(e.id == 112);
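The change above, together with the INFO_WITH_TEMP lines added throughout the roundtrip sections, replaces Catch-style captures of freshly built strings. doctest's INFO and CAPTURE record their arguments lazily and stringify them only when a failure is reported, so handing them a temporary is risky; INFO_WITH_TEMP comes from doctest_compatibility.h, which is not shown in these hunks, and presumably materializes the message first. One plausible shape for such a macro, purely as a sketch (assumes <string> and the doctest header are already included, and at most one use per scope):

// copy the value into a named local so the lazily evaluated INFO never
// refers to a temporary that has already been destroyed
#define INFO_WITH_TEMP(x)                          \
    const std::string info_with_temp_copy = (x);   \
    INFO(info_with_temp_copy)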
@@ -1980,6 +1986,7 @@ TEST_CASE("all CBOR first bytes", "[!throws]")
         }
     }
 }
+#endif
 
 TEST_CASE("examples from RFC 7049 Appendix A")
 {