Browse Source

Jackson2JsonTokenizer unwraps top level array only

When the input stream is a JSON array the tokenizer skips over the
start and end array tokens in order to decode into an array of
top-level objects. However in this mode it also skips over nested
start and end array tokens which prevents proper parsing of JSR-310
dates (represented with an array syntax) as well as any fields that
are collections.

Issue: SPR-15803
pull/1484/head
Rossen Stoyanchev 9 years ago
parent
commit
bd0de7086e
  1. 7
      spring-web/src/main/java/org/springframework/http/codec/json/Jackson2Tokenizer.java
  2. 13
      spring-web/src/test/java/org/springframework/http/codec/json/Jackson2TokenizerTests.java

7
spring-web/src/main/java/org/springframework/http/codec/json/Jackson2Tokenizer.java

@@ -140,7 +140,7 @@ class Jackson2Tokenizer implements Function<DataBuffer, Flux<TokenBuffer>> {
}
private void processTokenArray(JsonToken token, List<TokenBuffer> result) throws IOException {
-		if (token != JsonToken.START_ARRAY && token != JsonToken.END_ARRAY) {
+		if (!isTopLevelArrayToken(token)) {
this.tokenBuffer.copyCurrentEvent(this.parser);
}
@@ -151,6 +151,11 @@ class Jackson2Tokenizer implements Function<DataBuffer, Flux<TokenBuffer>> {
}
}
/**
 * Returns {@code true} if the given array token belongs to the outermost
 * JSON array of the input stream (the one being unwrapped), as opposed to
 * a nested array inside an element, which must be copied through as-is.
 * <p>NOTE(review): relies on {@code arrayDepth} being updated around token
 * processing so that the top-level START_ARRAY is seen at depth 1 and the
 * top-level END_ARRAY at depth 0 — confirm against the depth bookkeeping.
 */
private boolean isTopLevelArrayToken(JsonToken token) {
	if (token == JsonToken.START_ARRAY) {
		return this.arrayDepth == 1;
	}
	else if (token == JsonToken.END_ARRAY) {
		return this.arrayDepth == 0;
	}
	return false;
}
/**
 * Signal that no more input will arrive, by delegating to the underlying
 * non-blocking input feeder. Presumably this lets the async parser emit
 * any remaining buffered tokens — confirm against Jackson's
 * {@code ByteArrayFeeder#endOfInput} contract.
 */
public void endOfInput() {
	this.inputFeeder.endOfInput();
}

13
spring-web/src/test/java/org/springframework/http/codec/json/Jackson2TokenizerTests.java

@ -116,6 +116,19 @@ public class Jackson2TokenizerTests extends AbstractDataBufferAllocatingTestCase @@ -116,6 +116,19 @@ public class Jackson2TokenizerTests extends AbstractDataBufferAllocatingTestCase
asList("{\"foo\": \"bar\"}",
"{\"foo\": \"baz\"}"));
// SPR-15803
testTokenize(
singletonList("[" +
"{\"id\":\"0\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}," +
"{\"id\":\"1\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}," +
"{\"id\":\"2\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}" +
"]"),
asList(
"{\"id\":\"0\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}",
"{\"id\":\"1\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}",
"{\"id\":\"2\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}")
);
testTokenize(
asList("[{\"foo\": \"foofoo\", \"bar\"",
": \"barbar\"},{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"),

Loading…
Cancel
Save