GCC Code Coverage Report
File:      test/zserio/JsonTokenizerTest.cpp
Directory: ./
Date:      2023-12-13 14:51:09

                Exec   Total   Coverage
Lines:           204     204    100.0 %
Branches:        684    2707     25.3 %

#include "gtest/gtest.h"

#include <sstream>

#include "zserio/JsonTokenizer.h"

namespace zserio
{

using JsonTokenizer = BasicJsonTokenizer<>;
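
// A minimal usage sketch (an assumption for illustration, not asserted by this file beyond
// the calls exercised below): the tokenizer is driven by calling next() until END_OF_FILE,
// reading the current token's value and position in between.
//
//     JsonTokenizer tokenizer(stream, std::allocator<uint8_t>());
//     for (JsonToken token = tokenizer.next(); token != JsonToken::END_OF_FILE; token = tokenizer.next())
//     {
//         // tokenizer.getValue() holds the token value (get<char>(), get<string<>>(),
//         // get<uint64_t>(), get<double>()); getLine()/getColumn() give its position.
//     }

// Verifies the token sequence produced for a small nested JSON document.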
TEST(JsonTokenizerTest, tokens)
{
    std::stringstream str("{\"array\":\n[\n{\"key\":\n10}]}");
    auto tokenizer = JsonTokenizer(str, std::allocator<uint8_t>());
    ASSERT_EQ(JsonToken::BEGIN_OBJECT, tokenizer.next());
    ASSERT_EQ('{', tokenizer.getValue().get<char>());
    ASSERT_EQ(JsonToken::VALUE, tokenizer.next());
    ASSERT_EQ("array", tokenizer.getValue().get<string<>>());
    ASSERT_EQ(JsonToken::KEY_SEPARATOR, tokenizer.next());
    ASSERT_EQ(':', tokenizer.getValue().get<char>());
    ASSERT_EQ(JsonToken::BEGIN_ARRAY, tokenizer.next());
    ASSERT_EQ('[', tokenizer.getValue().get<char>());
    ASSERT_EQ(JsonToken::BEGIN_OBJECT, tokenizer.next());
    ASSERT_EQ('{', tokenizer.getValue().get<char>());
    ASSERT_EQ(JsonToken::VALUE, tokenizer.next());
    ASSERT_EQ("key", tokenizer.getValue().get<string<>>());
    ASSERT_EQ(JsonToken::KEY_SEPARATOR, tokenizer.next());
    ASSERT_EQ(':', tokenizer.getValue().get<char>());
    ASSERT_EQ(JsonToken::VALUE, tokenizer.next());
    ASSERT_EQ(10, tokenizer.getValue().get<uint64_t>());
    ASSERT_EQ(JsonToken::END_OBJECT, tokenizer.next());
    ASSERT_EQ('}', tokenizer.getValue().get<char>());
    ASSERT_EQ(JsonToken::END_ARRAY, tokenizer.next());
    ASSERT_EQ(']', tokenizer.getValue().get<char>());
    ASSERT_EQ(JsonToken::END_OBJECT, tokenizer.next());
    ASSERT_EQ('}', tokenizer.getValue().get<char>());
    ASSERT_EQ(JsonToken::END_OF_FILE, tokenizer.next());
}
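
// Verifies that getLine()/getColumn() report 1-based positions and that '\r', '\n' and
// "\r\n" are each treated as a single line end (a tab advances the column by one).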
TEST(JsonTokenizerTest, lineColumn)
{
    std::stringstream str("\n\t{\r   \"key\"  \r\n\t :\n10}\r");
    JsonTokenizer tokenizer(str, std::allocator<uint8_t>());

    ASSERT_EQ(JsonToken::BEGIN_OBJECT, tokenizer.next());
    ASSERT_EQ('{', tokenizer.getValue().get<char>());
    ASSERT_EQ(2, tokenizer.getLine());
    ASSERT_EQ(2, tokenizer.getColumn());

    ASSERT_EQ(JsonToken::VALUE, tokenizer.next());
    ASSERT_EQ("key", tokenizer.getValue().get<string<>>());
    ASSERT_EQ(3, tokenizer.getLine());
    ASSERT_EQ(4, tokenizer.getColumn());

    ASSERT_EQ(JsonToken::KEY_SEPARATOR, tokenizer.next());
    ASSERT_EQ(':', tokenizer.getValue().get<char>());
    ASSERT_EQ(4, tokenizer.getLine());
    ASSERT_EQ(3, tokenizer.getColumn());

    ASSERT_EQ(JsonToken::VALUE, tokenizer.next());
    ASSERT_EQ(10, tokenizer.getValue().get<uint64_t>());
    ASSERT_EQ(5, tokenizer.getLine());
    ASSERT_EQ(1, tokenizer.getColumn());

    ASSERT_EQ(JsonToken::END_OBJECT, tokenizer.next());
    ASSERT_EQ('}', tokenizer.getValue().get<char>());
    ASSERT_EQ(5, tokenizer.getLine());
    ASSERT_EQ(3, tokenizer.getColumn());

    ASSERT_EQ(JsonToken::END_OF_FILE, tokenizer.next());
    ASSERT_FALSE(tokenizer.getValue().hasValue());
    ASSERT_EQ(5, tokenizer.getLine());
    ASSERT_EQ(4, tokenizer.getColumn());
}
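
// Builds an input larger than the tokenizer's 64 KiB buffer so that the buffer boundary
// falls in the middle of an integer value; the value and its line/column must survive
// the buffer refill.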
TEST(JsonTokenizerTest, longInputSplitInNumber)
{
    std::stringstream str;
    str << "{\n"; // 2 chars
    for (size_t i = 0; i < 4000; ++i) // 20 x 4000 > 65534 to check reading by chunks
    {
        // BUFFER_SIZE is 65536, thus 65534 % 20 gives position within the string below
        // where the buffer will be split => 14, which is somewhere in the middle of the number
        //     |->            <-|
        str << "  \"key\": 100000000,\n"; // 20 chars
    }
    str << "  \"key\": 100000000\n";
    str << '}';

    JsonTokenizer tokenizer(str, std::allocator<uint8_t>());

    ASSERT_EQ(JsonToken::BEGIN_OBJECT, tokenizer.next());
    ASSERT_EQ('{', tokenizer.getValue().get<char>());
    ASSERT_EQ(1, tokenizer.getLine());
    ASSERT_EQ(1, tokenizer.getColumn());

    size_t i = 0;
    for (; i < 4000; ++i)
    {
        ASSERT_EQ(JsonToken::VALUE, tokenizer.next()) << "i=" << i;
        ASSERT_EQ("key", tokenizer.getValue().get<string<>>()) << "i=" << i;
        ASSERT_EQ(1 + i + 1, tokenizer.getLine()) << "i=" << i;
        ASSERT_EQ(3, tokenizer.getColumn()) << "i=" << i;

        ASSERT_EQ(JsonToken::KEY_SEPARATOR, tokenizer.next()) << "i=" << i;
        ASSERT_EQ(':', tokenizer.getValue().get<char>()) << "i=" << i;
        ASSERT_EQ(1 + i + 1, tokenizer.getLine()) << "i=" << i;
        ASSERT_EQ(8, tokenizer.getColumn()) << "i=" << i;

        ASSERT_EQ(JsonToken::VALUE, tokenizer.next()) << "i=" << i;
        ASSERT_EQ(100000000, tokenizer.getValue().get<uint64_t>()) << "i=" << i;
        ASSERT_EQ(1 + i + 1, tokenizer.getLine()) << "i=" << i;
        ASSERT_EQ(10, tokenizer.getColumn()) << "i=" << i;

        ASSERT_EQ(JsonToken::ITEM_SEPARATOR, tokenizer.next()) << "i=" << i;
        ASSERT_EQ(',', tokenizer.getValue().get<char>()) << "i=" << i;
        ASSERT_EQ(1 + i + 1, tokenizer.getLine()) << "i=" << i;
        ASSERT_EQ(19, tokenizer.getColumn()) << "i=" << i;
    }

    ASSERT_EQ(JsonToken::VALUE, tokenizer.next()) << "i=" << i;
    ASSERT_EQ("key", tokenizer.getValue().get<string<>>()) << "i=" << i;
    ASSERT_EQ(1 + i + 1, tokenizer.getLine()) << "i=" << i;
    ASSERT_EQ(3, tokenizer.getColumn()) << "i=" << i;

    ASSERT_EQ(JsonToken::KEY_SEPARATOR, tokenizer.next()) << "i=" << i;
    ASSERT_EQ(':', tokenizer.getValue().get<char>()) << "i=" << i;
    ASSERT_EQ(1 + i + 1, tokenizer.getLine()) << "i=" << i;
    ASSERT_EQ(8, tokenizer.getColumn()) << "i=" << i;

    ASSERT_EQ(JsonToken::VALUE, tokenizer.next()) << "i=" << i;
    ASSERT_EQ(100000000, tokenizer.getValue().get<uint64_t>()) << "i=" << i;
    ASSERT_EQ(1 + i + 1, tokenizer.getLine()) << "i=" << i;
    ASSERT_EQ(10, tokenizer.getColumn()) << "i=" << i;

    ASSERT_EQ(JsonToken::END_OBJECT, tokenizer.next());
    ASSERT_EQ(1 + i + 2, tokenizer.getLine());
    ASSERT_EQ(1, tokenizer.getColumn());
}
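
// Same buffer-refill scenario as above, but the buffer boundary falls inside a quoted
// string value.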
TEST(JsonTokenizerTest, longInputSplitInString)
{
    std::stringstream str;
    str << "{\n"; // 2 chars
    for (size_t i = 0; i < 4000; ++i) // 20 x 4000 > 65534 to check reading by chunks
    {
        // BUFFER_SIZE is 65536, thus 65534 % 20 gives position within the string below
        // where the buffer will be split => 14, which is somewhere in the middle of the quoted string value
        //     |->             <-|
        str << "  \"key\": \"1000000\",\n"; // 20 chars
    }
    str << "  \"key\": \"1000000\"\n";
    str << '}';

    JsonTokenizer tokenizer(str, std::allocator<uint8_t>());

    ASSERT_EQ(JsonToken::BEGIN_OBJECT, tokenizer.next());
    ASSERT_EQ('{', tokenizer.getValue().get<char>());
    ASSERT_EQ(1, tokenizer.getLine());
    ASSERT_EQ(1, tokenizer.getColumn());

    size_t i = 0;
    for (; i < 4000; ++i)
    {
        ASSERT_EQ(JsonToken::VALUE, tokenizer.next()) << "i=" << i;
        ASSERT_EQ("key", tokenizer.getValue().get<string<>>()) << "i=" << i;
        ASSERT_EQ(1 + i + 1, tokenizer.getLine()) << "i=" << i;
        ASSERT_EQ(3, tokenizer.getColumn()) << "i=" << i;

        ASSERT_EQ(JsonToken::KEY_SEPARATOR, tokenizer.next()) << "i=" << i;
        ASSERT_EQ(':', tokenizer.getValue().get<char>()) << "i=" << i;
        ASSERT_EQ(1 + i + 1, tokenizer.getLine()) << "i=" << i;
        ASSERT_EQ(8, tokenizer.getColumn()) << "i=" << i;

        ASSERT_EQ(JsonToken::VALUE, tokenizer.next()) << "i=" << i;
        ASSERT_EQ("1000000", tokenizer.getValue().get<string<>>()) << "i=" << i;
        ASSERT_EQ(1 + i + 1, tokenizer.getLine()) << "i=" << i;
        ASSERT_EQ(10, tokenizer.getColumn()) << "i=" << i;

        ASSERT_EQ(JsonToken::ITEM_SEPARATOR, tokenizer.next()) << "i=" << i;
        ASSERT_EQ(',', tokenizer.getValue().get<char>()) << "i=" << i;
        ASSERT_EQ(1 + i + 1, tokenizer.getLine()) << "i=" << i;
        ASSERT_EQ(19, tokenizer.getColumn()) << "i=" << i;
    }

    ASSERT_EQ(JsonToken::VALUE, tokenizer.next()) << "i=" << i;
    ASSERT_EQ("key", tokenizer.getValue().get<string<>>()) << "i=" << i;
    ASSERT_EQ(1 + i + 1, tokenizer.getLine()) << "i=" << i;
    ASSERT_EQ(3, tokenizer.getColumn()) << "i=" << i;

    ASSERT_EQ(JsonToken::KEY_SEPARATOR, tokenizer.next()) << "i=" << i;
    ASSERT_EQ(':', tokenizer.getValue().get<char>()) << "i=" << i;
    ASSERT_EQ(1 + i + 1, tokenizer.getLine()) << "i=" << i;
    ASSERT_EQ(8, tokenizer.getColumn()) << "i=" << i;

    ASSERT_EQ(JsonToken::VALUE, tokenizer.next()) << "i=" << i;
    ASSERT_EQ("1000000", tokenizer.getValue().get<string<>>()) << "i=" << i;
    ASSERT_EQ(1 + i + 1, tokenizer.getLine()) << "i=" << i;
    ASSERT_EQ(10, tokenizer.getColumn()) << "i=" << i;

    ASSERT_EQ(JsonToken::END_OBJECT, tokenizer.next());
    ASSERT_EQ(1 + i + 2, tokenizer.getLine());
    ASSERT_EQ(1, tokenizer.getColumn()) << "i=" << i;
}
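
// Same buffer-refill scenario, with the buffer boundary falling inside the floating-point
// value "1e5", right after the 'e'.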
TEST(JsonTokenizerTest, longInputSplitInDoubleAfterE)
{
    std::stringstream str;
    str << "{\n"; // 2 chars
    for (size_t i = 0; i < 4000; ++i) // 20 x 4000 > 65534 to check reading by chunks
    {
        // BUFFER_SIZE is 65536, thus 65534 % 20 gives position within the string below
        // where the buffer will be split => 14, which is right after the 'e' of the double
        //     |->            <-|
        str << "  \"key\":    1e5   ,\n"; // 20 chars
    }
    str << "  \"key\":    1e5  \n";
    str << '}';

    JsonTokenizer tokenizer(str, std::allocator<uint8_t>());

    ASSERT_EQ(JsonToken::BEGIN_OBJECT, tokenizer.next());
    ASSERT_EQ('{', tokenizer.getValue().get<char>());
    ASSERT_EQ(1, tokenizer.getLine());
    ASSERT_EQ(1, tokenizer.getColumn());

    size_t i = 0;
    for (; i < 4000; ++i)
    {
        ASSERT_EQ(JsonToken::VALUE, tokenizer.next()) << "i=" << i;
        ASSERT_EQ("key", tokenizer.getValue().get<string<>>()) << "i=" << i;
        ASSERT_EQ(1 + i + 1, tokenizer.getLine()) << "i=" << i;
        ASSERT_EQ(3, tokenizer.getColumn()) << "i=" << i;

        ASSERT_EQ(JsonToken::KEY_SEPARATOR, tokenizer.next()) << "i=" << i;
        ASSERT_EQ(':', tokenizer.getValue().get<char>()) << "i=" << i;
        ASSERT_EQ(1 + i + 1, tokenizer.getLine()) << "i=" << i;
        ASSERT_EQ(8, tokenizer.getColumn()) << "i=" << i;

        ASSERT_EQ(JsonToken::VALUE, tokenizer.next()) << "i=" << i;
        ASSERT_EQ(1e5, tokenizer.getValue().get<double>()) << "i=" << i;
        ASSERT_EQ(1 + i + 1, tokenizer.getLine()) << "i=" << i;
        ASSERT_EQ(13, tokenizer.getColumn()) << "i=" << i;

        ASSERT_EQ(JsonToken::ITEM_SEPARATOR, tokenizer.next()) << "i=" << i;
        ASSERT_EQ(',', tokenizer.getValue().get<char>()) << "i=" << i;
        ASSERT_EQ(1 + i + 1, tokenizer.getLine()) << "i=" << i;
        ASSERT_EQ(19, tokenizer.getColumn()) << "i=" << i;
    }

    ASSERT_EQ(JsonToken::VALUE, tokenizer.next()) << "i=" << i;
    ASSERT_EQ("key", tokenizer.getValue().get<string<>>()) << "i=" << i;
    ASSERT_EQ(1 + i + 1, tokenizer.getLine()) << "i=" << i;
    ASSERT_EQ(3, tokenizer.getColumn()) << "i=" << i;

    ASSERT_EQ(JsonToken::KEY_SEPARATOR, tokenizer.next()) << "i=" << i;
    ASSERT_EQ(':', tokenizer.getValue().get<char>()) << "i=" << i;
    ASSERT_EQ(1 + i + 1, tokenizer.getLine()) << "i=" << i;
    ASSERT_EQ(8, tokenizer.getColumn()) << "i=" << i;

    ASSERT_EQ(JsonToken::VALUE, tokenizer.next()) << "i=" << i;
    ASSERT_EQ(1e5, tokenizer.getValue().get<double>()) << "i=" << i;
    ASSERT_EQ(1 + i + 1, tokenizer.getLine()) << "i=" << i;
    ASSERT_EQ(13, tokenizer.getColumn()) << "i=" << i;

    ASSERT_EQ(JsonToken::END_OBJECT, tokenizer.next());
    ASSERT_EQ(1 + i + 2, tokenizer.getLine());
    ASSERT_EQ(1, tokenizer.getColumn()) << "i=" << i;
}
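
// Verifies that an unexpected character is reported as a JsonParserException whose message
// carries the "JsonTokenizer:<line>:<column>: " prefix.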
TEST(JsonTokenizerTest, unknownToken)
{
    std::stringstream str("\\\n");
    JsonTokenizer tokenizer(str, std::allocator<uint8_t>());
    ASSERT_THROW({
        try
        {
            tokenizer.next();
        }
        catch (const JsonParserException& e)
        {
            ASSERT_STREQ("JsonTokenizer:1:1: Unknown token!", e.what());
            throw;
        }
    }, JsonParserException);
}
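
// Verifies that streaming a JsonToken into the exception (operator<<) appends the token's name
// to the exception message.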
TEST(JsonTokenizerTest, cppRuntimeExceptionOperator)
{
    ASSERT_STREQ("UNKNOWN", (JsonParserException() << (JsonToken::UNKNOWN)).what());
    ASSERT_STREQ("BEGIN_OF_FILE", (JsonParserException() << (JsonToken::BEGIN_OF_FILE)).what());
    ASSERT_STREQ("END_OF_FILE", (JsonParserException() << (JsonToken::END_OF_FILE)).what());
    ASSERT_STREQ("BEGIN_OBJECT", (JsonParserException() << (JsonToken::BEGIN_OBJECT)).what());
    ASSERT_STREQ("END_OBJECT", (JsonParserException() << (JsonToken::END_OBJECT)).what());
    ASSERT_STREQ("BEGIN_ARRAY", (JsonParserException() << (JsonToken::BEGIN_ARRAY)).what());
    ASSERT_STREQ("END_ARRAY", (JsonParserException() << (JsonToken::END_ARRAY)).what());
    ASSERT_STREQ("KEY_SEPARATOR", (JsonParserException() << (JsonToken::KEY_SEPARATOR)).what());
    ASSERT_STREQ("ITEM_SEPARATOR", (JsonParserException() << (JsonToken::ITEM_SEPARATOR)).what());
    ASSERT_STREQ("VALUE", (JsonParserException() << (JsonToken::VALUE)).what());
}

} // namespace zserio