JsonEOFException is wrapped as DecodingException

Jackson2Tokenizer now also wraps JsonProcessingExceptions thrown from
endOfInput(), as it already does in tokenize(DataBuffer). This ensures
such errors are treated as a 400 (bad request) rather than a 500 (server error).

Issue: SPR-16521
Branch: master
Author: Rossen Stoyanchev (7 years ago)
Parent: a6d527e57a
Commit: 6959e66b25
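For context only (not part of this commit): a minimal, standalone sketch of how Jackson's non-blocking parser raises JsonEOFException, a JsonProcessingException subclass, once endOfInput() has been signalled for truncated JSON. This is the exception the change below wraps as DecodingException; class and variable names in the sketch are illustrative.

import java.nio.charset.StandardCharsets;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.core.async.ByteArrayFeeder;

public class TruncatedJsonDemo {

	public static void main(String[] args) throws Exception {
		JsonFactory factory = new JsonFactory();
		JsonParser parser = factory.createNonBlockingByteArrayParser();
		ByteArrayFeeder feeder = (ByteArrayFeeder) parser.getNonBlockingInputFeeder();

		// Truncated JSON: the string value is never closed, mirroring the new test case.
		byte[] bytes = "{\"status\": \"noClosingQuote}".getBytes(StandardCharsets.UTF_8);
		feeder.feedInput(bytes, 0, bytes.length);
		feeder.endOfInput();

		try {
			// Drain tokens; with end-of-input signalled, the incomplete value
			// surfaces as JsonEOFException rather than NOT_AVAILABLE.
			JsonToken token = parser.nextToken();
			while (token != null && token != JsonToken.NOT_AVAILABLE) {
				token = parser.nextToken();
			}
		}
		catch (JsonProcessingException ex) {
			// This is the exception that Jackson2Tokenizer.endOfInput() now wraps
			// in a DecodingException.
			System.out.println(ex.getClass().getSimpleName() + ": " + ex.getOriginalMessage());
		}
	}
}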
Changed files:
  1. spring-web/src/main/java/org/springframework/http/codec/json/Jackson2Tokenizer.java (6 lines changed)
  2. spring-web/src/test/java/org/springframework/http/codec/json/Jackson2TokenizerTests.java (13 lines changed)

spring-web/src/main/java/org/springframework/http/codec/json/Jackson2Tokenizer.java

@@ -27,7 +27,6 @@ import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.core.JsonToken;
 import com.fasterxml.jackson.core.async.ByteArrayFeeder;
 import com.fasterxml.jackson.databind.util.TokenBuffer;
-import org.jetbrains.annotations.NotNull;
 import reactor.core.publisher.Flux;

 import org.springframework.core.codec.DecodingException;

@@ -114,12 +113,15 @@ class Jackson2Tokenizer {
 		try {
 			return parseTokenBufferFlux();
 		}
+		catch (JsonProcessingException ex) {
+			return Flux.error(new DecodingException(
+					"JSON decoding error: " + ex.getOriginalMessage(), ex));
+		}
 		catch (IOException ex) {
 			return Flux.error(ex);
 		}
 	}

-	@NotNull
 	private Flux<TokenBuffer> parseTokenBufferFlux() throws IOException {
 		List<TokenBuffer> result = new ArrayList<>();

spring-web/src/test/java/org/springframework/http/codec/json/Jackson2TokenizerTests.java

@@ -32,11 +32,12 @@ import org.skyscreamer.jsonassert.JSONAssert;
 import reactor.core.publisher.Flux;
 import reactor.test.StepVerifier;

+import org.springframework.core.codec.DecodingException;
 import org.springframework.core.io.buffer.AbstractDataBufferAllocatingTestCase;
 import org.springframework.core.io.buffer.DataBuffer;

-import static java.util.Arrays.asList;
-import static java.util.Collections.singletonList;
+import static java.util.Arrays.*;
+import static java.util.Collections.*;

 /**
  * @author Arjen Poutsma

@@ -174,6 +175,14 @@ public class Jackson2TokenizerTests extends AbstractDataBufferAllocatingTestCase
 				asList("1", "2", "3"), true);
 	}

+	@Test(expected = DecodingException.class) // SPR-16521
+	public void jsonEOFExceptionIsWrappedAsDecodingError() {
+		Flux<DataBuffer> source = Flux.just(stringBuffer("{\"status\": \"noClosingQuote}"));
+		Flux<TokenBuffer> tokens = Jackson2Tokenizer.tokenize(source, this.jsonFactory, false);
+		tokens.blockLast();
+	}
+
 	private void testTokenize(List<String> source, List<String> expected, boolean tokenizeArrayElements) {
 		Flux<DataBuffer> sourceFlux = Flux.fromIterable(source)
 				.map(this::stringBuffer);

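Usage note (hypothetical, not part of this commit): the same behaviour could be asserted reactively with StepVerifier instead of blockLast(), reusing the test class's stringBuffer() helper and jsonFactory field. The method name below is illustrative.

	// Hypothetical alternative inside Jackson2TokenizerTests.
	@Test
	public void jsonEOFExceptionSurfacesAsDecodingExceptionSignal() {
		Flux<DataBuffer> source = Flux.just(stringBuffer("{\"status\": \"noClosingQuote}"));
		Flux<TokenBuffer> tokens = Jackson2Tokenizer.tokenize(source, this.jsonFactory, false);

		// Expect the Flux to terminate with the wrapped DecodingException.
		StepVerifier.create(tokens)
				.expectError(DecodingException.class)
				.verify();
	}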