 
 using namespace jsoncons;
 
+#if 0
 TEST_CASE("json_tokenizer constructor test")
 {
     SECTION("default constructor")
@@ -340,10 +341,11 @@ TEST_CASE("json_tokenizer update test") |
         }
     }
 }
+#endif
 
 TEST_CASE("json_tokenizer incremental update tests")
 {
-    SECTION("test 1")
+    /*SECTION("test 1")
     {
         std::string data{"123456"};
         std::string more_data{"78"};
@@ -653,6 +655,129 @@ TEST_CASE("json_tokenizer incremental update tests")
             std::cout << "ec: " << rc.ec << "\n";
         } while (!tokenizer.done());
 
+    }*/
+
+    SECTION("test 8")
+    {
+        std::string data = R"([
+ {
+ "given":
+ [{"name": "JS"}]
+ }
+]
+)";
+
+        json_tokenizer tokenizer{};
+
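+        // Read the input through a stream_source with a deliberately small
+        // (8-byte) buffer so that tokens straddle chunk boundaries.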
+        std::istringstream is(data);
+        stream_source<char> source(is, 8);
+
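+        // Feed the first chunk to the tokenizer.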
+        auto chunk = source.read_buffer();
+        tokenizer.update(chunk.data(), chunk.size());
+
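+        // The first token is the opening '[' of the outer array.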
+        auto r = tokenizer.try_next_token();
+        REQUIRE(r);
+        CHECK_FALSE(tokenizer.done());
+        CHECK(tokenizer.started());
+        CHECK_FALSE(tokenizer.source_exhausted());
+        CHECK(generic_token_kind::begin_array == tokenizer.token_kind());
+
+        r = tokenizer.try_next_token();
+        REQUIRE(r);
+        CHECK_FALSE(tokenizer.done());
+        CHECK(generic_token_kind::begin_map == tokenizer.token_kind());
+        CHECK_FALSE(tokenizer.source_exhausted());
+
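+        // A default-constructed token kind together with source_exhausted()
+        // indicates the tokenizer stalled mid-token: feed the next chunk and retry.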
+        r = tokenizer.try_next_token();
+        REQUIRE(r);
+        CHECK_FALSE(tokenizer.done());
+        CHECK(generic_token_kind{} == tokenizer.token_kind());
+        CHECK(tokenizer.source_exhausted());
+        chunk = source.read_buffer();
+        tokenizer.update(chunk.data(), chunk.size());
+
+        r = tokenizer.try_next_token();
+        REQUIRE(r);
+        CHECK_FALSE(tokenizer.done());
+        CHECK(generic_token_kind{} == tokenizer.token_kind());
+        CHECK(tokenizer.source_exhausted());
+        chunk = source.read_buffer();
+        tokenizer.update(chunk.data(), chunk.size());
+
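+        // "given" arrives as a string token flagged as a map key.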
+        r = tokenizer.try_next_token();
+        REQUIRE(r);
+        CHECK_FALSE(tokenizer.done());
+        CHECK_FALSE(tokenizer.source_exhausted());
+        CHECK(generic_token_kind::string_value == tokenizer.token_kind());
+        CHECK(tokenizer.is_key());
+
+        r = tokenizer.try_next_token();
+        REQUIRE(r);
+        CHECK_FALSE(tokenizer.done());
+        CHECK(generic_token_kind{} == tokenizer.token_kind());
+        CHECK(tokenizer.source_exhausted());
+        chunk = source.read_buffer();
+        tokenizer.update(chunk.data(), chunk.size());
+
+        r = tokenizer.try_next_token();
+        REQUIRE(r);
+        CHECK_FALSE(tokenizer.done());
+        CHECK(generic_token_kind{} == tokenizer.token_kind());
+        CHECK(tokenizer.source_exhausted());
+        chunk = source.read_buffer();
+        tokenizer.update(chunk.data(), chunk.size());
+
+        r = tokenizer.try_next_token();
+        REQUIRE(r);
+        CHECK_FALSE(tokenizer.done());
+        CHECK_FALSE(tokenizer.source_exhausted());
+        CHECK(generic_token_kind::begin_array == tokenizer.token_kind());
+        CHECK_FALSE(tokenizer.is_key());
+
+        r = tokenizer.try_next_token();
+        REQUIRE(r);
+        CHECK_FALSE(tokenizer.done());
+        CHECK(generic_token_kind::begin_map == tokenizer.token_kind());
+        CHECK_FALSE(tokenizer.source_exhausted());
+
+        r = tokenizer.try_next_token();
+        REQUIRE(r);
+        CHECK_FALSE(tokenizer.done());
+        CHECK(generic_token_kind{} == tokenizer.token_kind());
+        CHECK(tokenizer.source_exhausted());
+        chunk = source.read_buffer();
+        tokenizer.update(chunk.data(), chunk.size());
+
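+        // "name" arrives as the key of the inner map.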
+        r = tokenizer.try_next_token();
+        REQUIRE(r);
+        CHECK_FALSE(tokenizer.done());
+        CHECK_FALSE(tokenizer.source_exhausted());
+        CHECK(generic_token_kind::string_value == tokenizer.token_kind());
+        CHECK(tokenizer.is_key());
+
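+        // "JS" is a plain string value, not a key.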
+        r = tokenizer.try_next_token();
+        REQUIRE(r);
+        CHECK_FALSE(tokenizer.done());
+        CHECK_FALSE(tokenizer.source_exhausted());
+        CHECK(generic_token_kind::string_value == tokenizer.token_kind());
+        CHECK_FALSE(tokenizer.is_key());
+
+        r = tokenizer.try_next_token();
+        REQUIRE(r);
+        CHECK_FALSE(tokenizer.done());
+        CHECK(generic_token_kind::end_map == tokenizer.token_kind()); // missing event
+        CHECK(tokenizer.source_exhausted());
+        chunk = source.read_buffer();
+        tokenizer.update(chunk.data(), chunk.size());
     }
 }
 