@@ -27,6 +27,10 @@ import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.core.TreeNode ;
import com.fasterxml.jackson.databind.ObjectMapper ;
import com.fasterxml.jackson.databind.util.TokenBuffer ;
import io.netty.buffer.ByteBuf ;
import io.netty.buffer.ByteBufAllocator ;
import io.netty.buffer.CompositeByteBuf ;
import io.netty.buffer.UnpooledByteBufAllocator ;
import org.json.JSONException ;
import org.junit.jupiter.api.BeforeEach ;
import org.junit.jupiter.api.Test ;
@@ -37,6 +41,7 @@ import reactor.test.StepVerifier;
import org.springframework.core.codec.DecodingException ;
import org.springframework.core.io.buffer.DataBuffer ;
import org.springframework.core.io.buffer.DataBufferLimitException ;
import org.springframework.core.io.buffer.NettyDataBufferFactory ;
import org.springframework.core.testfixture.io.buffer.AbstractLeakCheckingTests ;
import static java.util.Arrays.asList ;
@@ -337,22 +342,47 @@ public class Jackson2TokenizerTests extends AbstractLeakCheckingTests {
. verifyComplete ( ) ;
}
// gh-31747
@Test
public void compositeNettyBuffer ( ) {
ByteBufAllocator allocator = UnpooledByteBufAllocator . DEFAULT ;
ByteBuf firstByteBuf = allocator . buffer ( ) ;
firstByteBuf . writeBytes ( "{\"foo\": \"foofoo\"" . getBytes ( StandardCharsets . UTF_8 ) ) ;
ByteBuf secondBuf = allocator . buffer ( ) ;
secondBuf . writeBytes ( ", \"bar\": \"barbar\"}" . getBytes ( StandardCharsets . UTF_8 ) ) ;
CompositeByteBuf composite = allocator . compositeBuffer ( ) ;
composite . addComponent ( true , firstByteBuf ) ;
composite . addComponent ( true , secondBuf ) ;
NettyDataBufferFactory bufferFactory = new NettyDataBufferFactory ( allocator ) ;
Flux < DataBuffer > source = Flux . just ( bufferFactory . wrap ( composite ) ) ;
Flux < TokenBuffer > tokens = Jackson2Tokenizer . tokenize ( source , this . jsonFactory , this . objectMapper , false , false , - 1 ) ;
Flux < String > strings = tokens . map ( this : : tokenToString ) ;
StepVerifier . create ( strings )
. assertNext ( s - > assertThat ( s ) . isEqualTo ( "{\"foo\":\"foofoo\",\"bar\":\"barbar\"}" ) )
. verifyComplete ( ) ;
}
private Flux < String > decode ( List < String > source , boolean tokenize , int maxInMemorySize ) {
Flux < TokenBuffer > tokens = Jackson2Tokenizer . tokenize (
Flux . fromIterable ( source ) . map ( this : : stringBuffer ) ,
this . jsonFactory , this . objectMapper , tokenize , false , maxInMemorySize ) ;
return tokens
. map ( tokenBuffer - > {
try {
TreeNode root = this . objectMapper . readTree ( tokenBuffer . asParser ( ) ) ;
return this . objectMapper . writeValueAsString ( root ) ;
}
catch ( IOException ex ) {
throw new UncheckedIOException ( ex ) ;
}
} ) ;
return tokens . map ( this : : tokenToString ) ;
}
/**
 * Render the JSON content captured in the given {@link TokenBuffer}
 * back to a compact JSON string via the shared {@code ObjectMapper}.
 * @throws UncheckedIOException if reading or writing the tree fails
 */
private String tokenToString(TokenBuffer tokenBuffer) {
	try {
		return this.objectMapper.writeValueAsString(
				this.objectMapper.readTree(tokenBuffer.asParser()));
	}
	catch (IOException ex) {
		throw new UncheckedIOException(ex);
	}
}
private DataBuffer stringBuffer ( String value ) {