 
 /**
  * @author Arjen Poutsma
+ * @author Rossen Stoyanchev
  */
 public class Jackson2TokenizerTests extends AbstractDataBufferAllocatingTestCase {

 	private ObjectMapper objectMapper;

 	private JsonFactory jsonFactory;

+
 	@Before
-	public void createParser() throws IOException {
-		jsonFactory = new JsonFactory();
-		this.objectMapper = new ObjectMapper(jsonFactory);
+	public void createParser() {
+		this.jsonFactory = new JsonFactory();
+		this.objectMapper = new ObjectMapper(this.jsonFactory);
 	}

 	@Test
@@ -66,41 +68,44 @@ public void doNotTokenizeArrayElements() {
 				singletonList("{\"foo\":\"foofoo\",\"bar\":\"barbar\"}"), false);

 		testTokenize(
-				singletonList("[{\"foo\": \"foofoo\", \"bar\": \"barbar\"},{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"),
-				singletonList("[{\"foo\": \"foofoo\", \"bar\": \"barbar\"},{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"), false);
+				singletonList("[" +
+						"{\"foo\": \"foofoo\", \"bar\": \"barbar\"}," +
+						"{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"),
+				singletonList("[" +
+						"{\"foo\": \"foofoo\", \"bar\": \"barbar\"}," +
+						"{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"), false);

 		testTokenize(
 				singletonList("[{\"foo\": \"bar\"},{\"foo\": \"baz\"}]"),
 				singletonList("[{\"foo\": \"bar\"},{\"foo\": \"baz\"}]"), false);

 		testTokenize(
-				asList("[{\"foo\": \"foofoo\", \"bar\"",
-						": \"barbar\"},{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"),
-				singletonList("[{\"foo\": \"foofoo\", \"bar\": \"barbar\"},{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"), false);
+				asList("[" +
+						"{\"foo\": \"foofoo\", \"bar\"", ": \"barbar\"}," +
+						"{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"),
+				singletonList("[" +
+						"{\"foo\": \"foofoo\", \"bar\": \"barbar\"}," +
+						"{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"), false);

 		testTokenize(
 				asList("[",
-						"{\"id\":1,\"name\":\"Robert\"}",
-						",",
-						"{\"id\":2,\"name\":\"Raide\"}",
-						",",
-						"{\"id\":3,\"name\":\"Ford\"}",
-						"]"),
-				singletonList("[{\"id\":1,\"name\":\"Robert\"},{\"id\":2,\"name\":\"Raide\"},{\"id\":3,\"name\":\"Ford\"}]"), false);
+						"{\"id\":1,\"name\":\"Robert\"}", ",",
+						"{\"id\":2,\"name\":\"Raide\"}", ",",
+						"{\"id\":3,\"name\":\"Ford\"}", "]"),
+				singletonList("[" +
+						"{\"id\":1,\"name\":\"Robert\"}," +
+						"{\"id\":2,\"name\":\"Raide\"}," +
+						"{\"id\":3,\"name\":\"Ford\"}]"), false);

 		// SPR-16166: top-level JSON values
-		testTokenize(asList("\"foo", "bar\"")
-				,singletonList("\"foobar\""), false);
+		testTokenize(asList("\"foo", "bar\""), singletonList("\"foobar\""), false);

-		testTokenize(asList("12", "34")
-				,singletonList("1234"), false);
+		testTokenize(asList("12", "34"), singletonList("1234"), false);

-		testTokenize(asList("12.", "34")
-				,singletonList("12.34"), false);
+		testTokenize(asList("12.", "34"), singletonList("12.34"), false);

 		// note that we do not test for null, true, or false, which are also valid top-level values,
 		// but are unsupported by JSONassert
-
 	}

 	@Test
@@ -110,19 +115,20 @@ public void tokenizeArrayElements() {
 				singletonList("{\"foo\": \"foofoo\", \"bar\": \"barbar\"}"), true);

 		testTokenize(
-				asList("{\"foo\": \"foofoo\"",
-						", \"bar\": \"barbar\"}"),
+				asList("{\"foo\": \"foofoo\"", ", \"bar\": \"barbar\"}"),
 				singletonList("{\"foo\":\"foofoo\",\"bar\":\"barbar\"}"), true);

 		testTokenize(
-				singletonList("[{\"foo\": \"foofoo\", \"bar\": \"barbar\"},{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"),
-				asList("{\"foo\": \"foofoo\", \"bar\": \"barbar\"}",
+				singletonList("[" +
+						"{\"foo\": \"foofoo\", \"bar\": \"barbar\"}," +
+						"{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"),
+				asList(
+						"{\"foo\": \"foofoo\", \"bar\": \"barbar\"}",
 						"{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}"), true);

 		testTokenize(
 				singletonList("[{\"foo\": \"bar\"},{\"foo\": \"baz\"}]"),
-				asList("{\"foo\": \"bar\"}",
-						"{\"foo\": \"baz\"}"), true);
+				asList("{\"foo\": \"bar\"}", "{\"foo\": \"baz\"}"), true);

 		// SPR-15803: nested array
 		testTokenize(
@@ -134,18 +140,19 @@ public void tokenizeArrayElements() {
 				asList(
 						"{\"id\":\"0\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}",
 						"{\"id\":\"1\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}",
-						"{\"id\":\"2\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}"),
-				true);
+						"{\"id\":\"2\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}"), true);

 		// SPR-15803: nested array, no top-level array
 		testTokenize(
 				singletonList("{\"speakerIds\":[\"tastapod\"],\"language\":\"ENGLISH\"}"),
 				singletonList("{\"speakerIds\":[\"tastapod\"],\"language\":\"ENGLISH\"}"), true);

 		testTokenize(
-				asList("[{\"foo\": \"foofoo\", \"bar\"",
-						": \"barbar\"},{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"),
-				asList("{\"foo\": \"foofoo\", \"bar\": \"barbar\"}",
+				asList("[" +
+						"{\"foo\": \"foofoo\", \"bar\"", ": \"barbar\"}," +
+						"{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"),
+				asList(
+						"{\"foo\": \"foofoo\", \"bar\": \"barbar\"}",
 						"{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}"), true);

 		testTokenize(
@@ -161,18 +168,14 @@ public void tokenizeArrayElements() {
 						"{\"id\":3,\"name\":\"Ford\"}"), true);

 		// SPR-16166: top-level JSON values
-		testTokenize(asList("\"foo", "bar\"")
-				,singletonList("\"foobar\""), true);
+		testTokenize(asList("\"foo", "bar\""), singletonList("\"foobar\""), true);

-		testTokenize(asList("12", "34")
-				,singletonList("1234"), true);
+		testTokenize(asList("12", "34"), singletonList("1234"), true);

-		testTokenize(asList("12.", "34")
-				,singletonList("12.34"), true);
+		testTokenize(asList("12.", "34"), singletonList("12.34"), true);

 		// SPR-16407
-		testTokenize(asList("[1", ",2,", "3]"),
-				asList("1", "2", "3"), true);
+		testTokenize(asList("[1", ",2,", "3]"), asList("1", "2", "3"), true);
 	}

 	@Test(expected = DecodingException.class) // SPR-16521
@@ -184,11 +187,11 @@ public void jsonEOFExceptionIsWrappedAsDecodingError() {
 
 
 	private void testTokenize(List<String> source, List<String> expected, boolean tokenizeArrayElements) {
-		Flux<DataBuffer> sourceFlux = Flux.fromIterable(source)
-				.map(this::stringBuffer);

-		Flux<TokenBuffer> tokenBufferFlux =
-				Jackson2Tokenizer.tokenize(sourceFlux, this.jsonFactory, tokenizeArrayElements);
+		Flux<TokenBuffer> tokenBufferFlux = Jackson2Tokenizer.tokenize(
+				Flux.fromIterable(source).map(this::stringBuffer),
+				this.jsonFactory,
+				tokenizeArrayElements);

 		Flux<String> result = tokenBufferFlux
 				.map(tokenBuffer -> {
@@ -202,17 +205,16 @@ private void testTokenize(List<String> source, List<String> expected, boolean to
 				});

 		StepVerifier.FirstStep<String> builder = StepVerifier.create(result);
-		for (String s : expected) {
-			builder.assertNext(new JSONAssertConsumer(s));
-		}
+		expected.forEach(s -> builder.assertNext(new JSONAssertConsumer(s)));
 		builder.verifyComplete();
 	}

+
 	private static class JSONAssertConsumer implements Consumer<String> {

 		private final String expected;

-		public JSONAssertConsumer(String expected) {
+		JSONAssertConsumer(String expected) {
 			this.expected = expected;
 		}

@@ -226,6 +228,4 @@ public void accept(String s) {
 			}
 		}
 	}
-
-
 }
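
Note (not part of the commit): the sketch below illustrates how the tokenizer under test is driven, assuming the same test class, imports, and `stringBuffer` helper shown above. With `tokenizeArrayElements = true`, a top-level JSON array is split into one `TokenBuffer` per element even when the input arrives chopped across arbitrary `DataBuffer` boundaries; with `false`, the whole array is emitted as a single `TokenBuffer`. The method name `tokenizeSketch` is hypothetical.

```java
@Test
public void tokenizeSketch() {
	// JSON arrives split across arbitrary buffer boundaries (here: between the two elements).
	Flux<DataBuffer> chunks = Flux.fromIterable(
			asList("[{\"foo\": \"bar\"},", "{\"foo\": \"baz\"}]")).map(this::stringBuffer);

	// true: emit one TokenBuffer per array element; false: one TokenBuffer for the whole array.
	Flux<TokenBuffer> tokens = Jackson2Tokenizer.tokenize(chunks, this.jsonFactory, true);

	// Two elements in the array, so two TokenBuffers are expected.
	StepVerifier.create(tokens).expectNextCount(2).verifyComplete();
}
```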