
Polish

Branch: pull/1697/merge
Rossen Stoyanchev committed 8 years ago
Parent commit: 3bfa56dff2

2 changed files:
  1. spring-web/src/main/java/org/springframework/http/codec/json/Jackson2Tokenizer.java (9 lines changed)
  2. spring-web/src/test/java/org/springframework/http/codec/json/Jackson2TokenizerTests.java (102 lines changed)

spring-web/src/main/java/org/springframework/http/codec/json/Jackson2Tokenizer.java (9 lines changed)

@@ -68,8 +68,7 @@ class Jackson2Tokenizer {
 	}

 	/**
-	 * Tokenize the given {@link DataBuffer} flux into a {@link TokenBuffer} flux, given the
-	 * parameters.
+	 * Tokenize the given {@code Flux<DataBuffer>} into {@code Flux<TokenBuffer>}.
 	 * @param dataBuffers the source data buffers
 	 * @param jsonFactory the factory to use
 	 * @param tokenizeArrayElements if {@code true} and the "top level" JSON
@@ -79,10 +78,10 @@ class Jackson2Tokenizer {
 	 */
 	public static Flux<TokenBuffer> tokenize(Flux<DataBuffer> dataBuffers, JsonFactory jsonFactory,
 			boolean tokenizeArrayElements) {

 		try {
-			Jackson2Tokenizer tokenizer =
-					new Jackson2Tokenizer(jsonFactory.createNonBlockingByteArrayParser(),
-							tokenizeArrayElements);
+			JsonParser parser = jsonFactory.createNonBlockingByteArrayParser();
+			Jackson2Tokenizer tokenizer = new Jackson2Tokenizer(parser, tokenizeArrayElements);
 			return dataBuffers.flatMap(tokenizer::tokenize, Flux::error, tokenizer::endOfInput);
 		}
 		catch (IOException ex) {
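
For orientation, here is how the reshaped factory method would be driven end to end. This is a minimal sketch, not part of the commit: the class name TokenizeSketch and the sample payload are invented, the code assumes it sits in the org.springframework.http.codec.json package (Jackson2Tokenizer is package-private), and it assumes Jackson 2.9+, which introduced createNonBlockingByteArrayParser().

    package org.springframework.http.codec.json;

    import java.nio.charset.StandardCharsets;

    import com.fasterxml.jackson.core.JsonFactory;
    import com.fasterxml.jackson.databind.util.TokenBuffer;
    import org.springframework.core.io.buffer.DataBuffer;
    import org.springframework.core.io.buffer.DefaultDataBufferFactory;
    import reactor.core.publisher.Flux;

    // Hypothetical driver class, for illustration only.
    public class TokenizeSketch {

        public static void main(String[] args) {
            DefaultDataBufferFactory bufferFactory = new DefaultDataBufferFactory();

            // Simulate a JSON array arriving split across two network buffers.
            Flux<DataBuffer> chunks = Flux.just("[{\"a\":1},", "{\"a\":2}]")
                    .map(s -> bufferFactory.wrap(s.getBytes(StandardCharsets.UTF_8)));

            // tokenizeArrayElements = true: emit one TokenBuffer per top-level
            // array element rather than one buffer for the whole array.
            Flux<TokenBuffer> tokens = Jackson2Tokenizer.tokenize(chunks, new JsonFactory(), true);

            tokens.count().subscribe(n -> System.out.println(n + " elements"));  // prints "2 elements"
        }
    }

The refactoring in the hunk above is purely cosmetic: extracting the parser into a local variable flattens the nested constructor call without changing behavior.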

spring-web/src/test/java/org/springframework/http/codec/json/Jackson2TokenizerTests.java (102 lines changed)

@@ -41,6 +41,7 @@ import static java.util.Collections.*;

 /**
  * @author Arjen Poutsma
+ * @author Rossen Stoyanchev
  */
 public class Jackson2TokenizerTests extends AbstractDataBufferAllocatingTestCase {

@@ -48,10 +49,11 @@ public class Jackson2TokenizerTests extends AbstractDataBufferAllocatingTestCase
 	private JsonFactory jsonFactory;

 	@Before
-	public void createParser() throws IOException {
-		jsonFactory = new JsonFactory();
-		this.objectMapper = new ObjectMapper(jsonFactory);
+	public void createParser() {
+		this.jsonFactory = new JsonFactory();
+		this.objectMapper = new ObjectMapper(this.jsonFactory);
 	}

 	@Test
@@ -66,41 +68,44 @@ public class Jackson2TokenizerTests extends AbstractDataBufferAllocatingTestCase
 				singletonList("{\"foo\":\"foofoo\",\"bar\":\"barbar\"}"), false);

 		testTokenize(
-				singletonList("[{\"foo\": \"foofoo\", \"bar\": \"barbar\"},{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"),
-				singletonList("[{\"foo\": \"foofoo\", \"bar\": \"barbar\"},{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"), false);
+				singletonList("[" +
+						"{\"foo\": \"foofoo\", \"bar\": \"barbar\"}," +
+						"{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"),
+				singletonList("[" +
+						"{\"foo\": \"foofoo\", \"bar\": \"barbar\"}," +
+						"{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"), false);

 		testTokenize(
 				singletonList("[{\"foo\": \"bar\"},{\"foo\": \"baz\"}]"),
 				singletonList("[{\"foo\": \"bar\"},{\"foo\": \"baz\"}]"), false);

 		testTokenize(
-				asList("[{\"foo\": \"foofoo\", \"bar\"",
-						": \"barbar\"},{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"),
-				singletonList("[{\"foo\": \"foofoo\", \"bar\": \"barbar\"},{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"), false);
+				asList("[" +
+						"{\"foo\": \"foofoo\", \"bar\"", ": \"barbar\"}," +
+						"{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"),
+				singletonList("[" +
+						"{\"foo\": \"foofoo\", \"bar\": \"barbar\"}," +
+						"{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"), false);

 		testTokenize(
 				asList("[",
-						"{\"id\":1,\"name\":\"Robert\"}",
-						",",
-						"{\"id\":2,\"name\":\"Raide\"}",
-						",",
-						"{\"id\":3,\"name\":\"Ford\"}",
-						"]"),
-				singletonList("[{\"id\":1,\"name\":\"Robert\"},{\"id\":2,\"name\":\"Raide\"},{\"id\":3,\"name\":\"Ford\"}]"), false);
+						"{\"id\":1,\"name\":\"Robert\"}", ",",
+						"{\"id\":2,\"name\":\"Raide\"}", ",",
+						"{\"id\":3,\"name\":\"Ford\"}", "]"),
+				singletonList("[" +
+						"{\"id\":1,\"name\":\"Robert\"}," +
+						"{\"id\":2,\"name\":\"Raide\"}," +
+						"{\"id\":3,\"name\":\"Ford\"}]"), false);

 		// SPR-16166: top-level JSON values
-		testTokenize(asList("\"foo", "bar\"")
-				,singletonList("\"foobar\""), false);
+		testTokenize(asList("\"foo", "bar\""), singletonList("\"foobar\""), false);

-		testTokenize(asList("12", "34")
-				,singletonList("1234"), false);
+		testTokenize(asList("12", "34"), singletonList("1234"), false);

-		testTokenize(asList("12.", "34")
-				,singletonList("12.34"), false);
+		testTokenize(asList("12.", "34"), singletonList("12.34"), false);

 		// note that we do not test for null, true, or false, which are also valid top-level values,
 		// but are unsupported by JSONassert
 	}

 	@Test
@@ -110,19 +115,20 @@ public class Jackson2TokenizerTests extends AbstractDataBufferAllocatingTestCase
 				singletonList("{\"foo\": \"foofoo\", \"bar\": \"barbar\"}"), true);

 		testTokenize(
-				asList("{\"foo\": \"foofoo\"",
-						", \"bar\": \"barbar\"}"),
+				asList("{\"foo\": \"foofoo\"", ", \"bar\": \"barbar\"}"),
 				singletonList("{\"foo\":\"foofoo\",\"bar\":\"barbar\"}"), true);

 		testTokenize(
-				singletonList("[{\"foo\": \"foofoo\", \"bar\": \"barbar\"},{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"),
-				asList("{\"foo\": \"foofoo\", \"bar\": \"barbar\"}",
+				singletonList("[" +
+						"{\"foo\": \"foofoo\", \"bar\": \"barbar\"}," +
+						"{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"),
+				asList(
+						"{\"foo\": \"foofoo\", \"bar\": \"barbar\"}",
 						"{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}"), true);

 		testTokenize(
 				singletonList("[{\"foo\": \"bar\"},{\"foo\": \"baz\"}]"),
-				asList("{\"foo\": \"bar\"}",
-						"{\"foo\": \"baz\"}"), true);
+				asList("{\"foo\": \"bar\"}", "{\"foo\": \"baz\"}"), true);

 		// SPR-15803: nested array
 		testTokenize(
@@ -134,8 +140,7 @@ public class Jackson2TokenizerTests extends AbstractDataBufferAllocatingTestCase
 				asList(
 						"{\"id\":\"0\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}",
 						"{\"id\":\"1\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}",
-						"{\"id\":\"2\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}"),
-				true);
+						"{\"id\":\"2\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}"), true);

 		// SPR-15803: nested array, no top-level array
 		testTokenize(
@@ -143,9 +148,11 @@ public class Jackson2TokenizerTests extends AbstractDataBufferAllocatingTestCase
 				singletonList("{\"speakerIds\":[\"tastapod\"],\"language\":\"ENGLISH\"}"), true);

 		testTokenize(
-				asList("[{\"foo\": \"foofoo\", \"bar\"",
-						": \"barbar\"},{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"),
-				asList("{\"foo\": \"foofoo\", \"bar\": \"barbar\"}",
+				asList("[" +
+						"{\"foo\": \"foofoo\", \"bar\"", ": \"barbar\"}," +
+						"{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"),
+				asList(
+						"{\"foo\": \"foofoo\", \"bar\": \"barbar\"}",
 						"{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}"), true);

 		testTokenize(
@@ -161,18 +168,14 @@ public class Jackson2TokenizerTests extends AbstractDataBufferAllocatingTestCase
 						"{\"id\":3,\"name\":\"Ford\"}"), true);

 		// SPR-16166: top-level JSON values
-		testTokenize(asList("\"foo", "bar\"")
-				,singletonList("\"foobar\""), true);
+		testTokenize(asList("\"foo", "bar\""), singletonList("\"foobar\""), true);

-		testTokenize(asList("12", "34")
-				,singletonList("1234"), true);
+		testTokenize(asList("12", "34"), singletonList("1234"), true);

-		testTokenize(asList("12.", "34")
-				,singletonList("12.34"), true);
+		testTokenize(asList("12.", "34"), singletonList("12.34"), true);

 		// SPR-16407
-		testTokenize(asList("[1", ",2,", "3]"),
-				asList("1", "2", "3"), true);
+		testTokenize(asList("[1", ",2,", "3]"), asList("1", "2", "3"), true);
 	}

 	@Test(expected = DecodingException.class) // SPR-16521
@ -184,11 +187,11 @@ public class Jackson2TokenizerTests extends AbstractDataBufferAllocatingTestCase
private void testTokenize(List<String> source, List<String> expected, boolean tokenizeArrayElements) { private void testTokenize(List<String> source, List<String> expected, boolean tokenizeArrayElements) {
Flux<DataBuffer> sourceFlux = Flux.fromIterable(source)
.map(this::stringBuffer);
Flux<TokenBuffer> tokenBufferFlux = Flux<TokenBuffer> tokenBufferFlux = Jackson2Tokenizer.tokenize(
Jackson2Tokenizer.tokenize(sourceFlux, this.jsonFactory, tokenizeArrayElements); Flux.fromIterable(source).map(this::stringBuffer),
this.jsonFactory,
tokenizeArrayElements);
Flux<String> result = tokenBufferFlux Flux<String> result = tokenBufferFlux
.map(tokenBuffer -> { .map(tokenBuffer -> {
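
The map body elided by this hunk converts each emitted TokenBuffer back into a JSON string for comparison. One plausible shape for that conversion, shown here only as an assumption (the helper name toJson is invented; TokenBuffer.asParser() and ObjectMapper.readTree(JsonParser) are standard Jackson APIs):

    // Assumed helper inside the test class; requires java.io.IOException and
    // java.io.UncheckedIOException imports.
    private String toJson(TokenBuffer tokenBuffer) {
        try {
            // Replay the buffered token stream through a parser and render it
            // as a compact JSON string.
            return this.objectMapper.readTree(tokenBuffer.asParser()).toString();
        }
        catch (IOException ex) {
            throw new UncheckedIOException(ex);
        }
    }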
@@ -202,17 +205,16 @@ public class Jackson2TokenizerTests extends AbstractDataBufferAllocatingTestCase
 				});

 		StepVerifier.FirstStep<String> builder = StepVerifier.create(result);
-		for (String s : expected) {
-			builder.assertNext(new JSONAssertConsumer(s));
-		}
+		expected.forEach(s -> builder.assertNext(new JSONAssertConsumer(s)));
 		builder.verifyComplete();
 	}

 	private static class JSONAssertConsumer implements Consumer<String> {

 		private final String expected;

-		public JSONAssertConsumer(String expected) {
+		JSONAssertConsumer(String expected) {
 			this.expected = expected;
 		}

@@ -226,6 +228,4 @@ public class Jackson2TokenizerTests extends AbstractDataBufferAllocatingTestCase
 			}
 		}
 	}
-
 }
-
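
Pieced together, the polished JSONAssertConsumer amounts to the standalone check below. This is a reconstruction under stated assumptions, not a verbatim copy: the catch block is only partially visible in the hunks above, and the strict flag passed to JSONAssert is assumed. JSONAssert.assertEquals(String expected, String actual, boolean strict) is the documented org.skyscreamer.jsonassert API the tests already depend on.

    import java.util.function.Consumer;

    import org.json.JSONException;
    import org.skyscreamer.jsonassert.JSONAssert;

    // Structurally compares the expected JSON against each emitted value and
    // rethrows the checked JSONException unchecked, so the consumer can run
    // inside StepVerifier.assertNext().
    class JSONAssertConsumer implements Consumer<String> {

        private final String expected;

        JSONAssertConsumer(String expected) {
            this.expected = expected;
        }

        @Override
        public void accept(String s) {
            try {
                JSONAssert.assertEquals(this.expected, s, true);  // strict flag assumed
            }
            catch (JSONException ex) {
                throw new RuntimeException(ex);
            }
        }
    }

Using a Consumer here fits StepVerifier's assertNext(Consumer) hook, which is why the loop over expected values could collapse into the single forEach shown in the diff.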