Support for ASCII in Jackson codec & converter
This commit introduces support for writing JSON with a US-ASCII
character encoding in the Jackson encoder and message converter,
treating it like UTF-8.

See gh-25322
poutsma committed Jun 30, 2020
1 parent 51a5517 commit 79c339b
Showing 5 changed files with 75 additions and 16 deletions.
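
At its core, the change registers one extra entry in the charset-to-JsonEncoding lookup used when serializing: US-ASCII maps onto Jackson's UTF-8 generator, which is safe because ASCII output is byte-for-byte valid UTF-8. The snippet below is an illustrative sketch, not Spring's actual class: the ENCODINGS map mirrors the static initializer from the diff, while the forCharset helper, its name, and its UTF-8 fallback are assumptions added here to show how a charset taken from a negotiated media type could be resolved.

import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;

import com.fasterxml.jackson.core.JsonEncoding;

class JsonEncodingLookupSketch {

	// Mirrors the static initializer introduced by this commit.
	private static final Map<String, JsonEncoding> ENCODINGS;

	static {
		ENCODINGS = new HashMap<>(JsonEncoding.values().length + 1);
		for (JsonEncoding encoding : JsonEncoding.values()) {
			ENCODINGS.put(encoding.getJavaName(), encoding);
		}
		// ASCII output is valid UTF-8, so Jackson's UTF-8 generator can be reused.
		ENCODINGS.put("US-ASCII", JsonEncoding.UTF8);
	}

	// Hypothetical helper (not part of the commit): resolve the JsonEncoding for a
	// charset from the requested media type, defaulting to UTF-8 when unknown.
	static JsonEncoding forCharset(Charset charset) {
		JsonEncoding encoding = ENCODINGS.get(charset.name());
		return (encoding != null ? encoding : JsonEncoding.UTF8);
	}

	public static void main(String[] args) {
		System.out.println(forCharset(StandardCharsets.US_ASCII));  // UTF8
		System.out.println(forCharset(StandardCharsets.UTF_16BE));  // UTF16_BE
	}
}
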
@@ -76,10 +76,11 @@ public abstract class AbstractJackson2Encoder extends Jackson2CodecSupport imple
STREAM_SEPARATORS.put(MediaType.APPLICATION_STREAM_JSON, NEWLINE_SEPARATOR);
STREAM_SEPARATORS.put(MediaType.parseMediaType("application/stream+x-jackson-smile"), new byte[0]);

ENCODINGS = new HashMap<>(JsonEncoding.values().length);
ENCODINGS = new HashMap<>(JsonEncoding.values().length + 1);
for (JsonEncoding encoding : JsonEncoding.values()) {
ENCODINGS.put(encoding.getJavaName(), encoding);
}
ENCODINGS.put("US-ASCII", JsonEncoding.UTF8);
}


@@ -24,11 +24,9 @@
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function;
import java.util.stream.Collectors;

import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonGenerator;
@@ -76,7 +74,16 @@
*/
public abstract class AbstractJackson2HttpMessageConverter extends AbstractGenericHttpMessageConverter<Object> {

private static final Map<String, JsonEncoding> ENCODINGS = jsonEncodings();
private static final Map<String, JsonEncoding> ENCODINGS;

static {
ENCODINGS = new HashMap<>(JsonEncoding.values().length + 1);
for (JsonEncoding encoding : JsonEncoding.values()) {
ENCODINGS.put(encoding.getJavaName(), encoding);
}
ENCODINGS.put("US-ASCII", JsonEncoding.UTF8);
}


/**
* The default charset used by the converter.
@@ -399,9 +406,4 @@ protected Long getContentLength(Object object, @Nullable MediaType contentType)
return super.getContentLength(object, contentType);
}

private static Map<String, JsonEncoding> jsonEncodings() {
return EnumSet.allOf(JsonEncoding.class).stream()
.collect(Collectors.toMap(JsonEncoding::getJavaName, Function.identity()));
}

}
@@ -87,6 +87,8 @@ public void canDecode() {
assertThat(decoder.canDecode(forClass(Pojo.class), APPLICATION_XML)).isFalse();
assertThat(this.decoder.canDecode(ResolvableType.forClass(Pojo.class),
new MediaType("application", "json", StandardCharsets.UTF_8))).isTrue();
assertThat(this.decoder.canDecode(ResolvableType.forClass(Pojo.class),
new MediaType("application", "json", StandardCharsets.US_ASCII))).isTrue();
assertThat(this.decoder.canDecode(ResolvableType.forClass(Pojo.class),
new MediaType("application", "json", StandardCharsets.ISO_8859_1))).isTrue();
}
@@ -227,8 +229,7 @@ public void bigDecimalFlux() {
public void decodeNonUtf8Encoding() {
Mono<DataBuffer> input = stringBuffer("{\"foo\":\"bar\"}", StandardCharsets.UTF_16);

testDecode(input, ResolvableType.forType(new ParameterizedTypeReference<Map<String, String>>() {
}),
testDecode(input, ResolvableType.forType(new ParameterizedTypeReference<Map<String, String>>() {}),
step -> step.assertNext(o -> assertThat((Map<String, String>) o).containsEntry("foo", "bar"))
.verifyComplete(),
MediaType.parseMediaType("application/json; charset=utf-16"),
@@ -242,8 +243,7 @@ public void decodeNonUnicode() {
stringBuffer("{\"føø\":\"bår\"}", StandardCharsets.ISO_8859_1)
);

testDecode(input, ResolvableType.forType(new ParameterizedTypeReference<Map<String, String>>() {
}),
testDecode(input, ResolvableType.forType(new ParameterizedTypeReference<Map<String, String>>() {}),
step -> step.assertNext(o -> assertThat((Map<String, String>) o).containsEntry("føø", "bår"))
.verifyComplete(),
MediaType.parseMediaType("application/json; charset=iso-8859-1"),
@@ -255,14 +255,28 @@
public void decodeMonoNonUtf8Encoding() {
Mono<DataBuffer> input = stringBuffer("{\"foo\":\"bar\"}", StandardCharsets.UTF_16);

testDecodeToMono(input, ResolvableType.forType(new ParameterizedTypeReference<Map<String, String>>() {
}),
testDecodeToMono(input, ResolvableType.forType(new ParameterizedTypeReference<Map<String, String>>() {}),
step -> step.assertNext(o -> assertThat((Map<String, String>) o).containsEntry("foo", "bar"))
.verifyComplete(),
MediaType.parseMediaType("application/json; charset=utf-16"),
null);
}

@Test
@SuppressWarnings("unchecked")
public void decodeAscii() {
Flux<DataBuffer> input = Flux.concat(
stringBuffer("{\"foo\":\"bar\"}", StandardCharsets.US_ASCII)
);

testDecode(input, ResolvableType.forType(new ParameterizedTypeReference<Map<String, String>>() {}),
step -> step.assertNext(o -> assertThat((Map<String, String>) o).containsEntry("foo", "bar"))
.verifyComplete(),
MediaType.parseMediaType("application/json; charset=us-ascii"),
null);
}


private Mono<DataBuffer> stringBuffer(String value) {
return stringBuffer(value, StandardCharsets.UTF_8);
}
@@ -71,6 +71,8 @@ public void canEncode() {

assertThat(this.encoder.canEncode(ResolvableType.forClass(Pojo.class),
new MediaType("application", "json", StandardCharsets.UTF_8))).isTrue();
assertThat(this.encoder.canEncode(ResolvableType.forClass(Pojo.class),
new MediaType("application", "json", StandardCharsets.US_ASCII))).isTrue();
assertThat(this.encoder.canEncode(ResolvableType.forClass(Pojo.class),
new MediaType("application", "json", StandardCharsets.ISO_8859_1))).isFalse();

@@ -225,6 +227,17 @@ public void encodeWithFlushAfterWriteOff() {
.verify(Duration.ofSeconds(5));
}

@Test
public void encodeAscii() {
Mono<Object> input = Mono.just(new Pojo("foo", "bar"));

testEncode(input, ResolvableType.forClass(Pojo.class), step -> step
.consumeNextWith(expectString("{\"foo\":\"foo\",\"bar\":\"bar\"}"))
.verifyComplete(),
new MimeType("application", "json", StandardCharsets.US_ASCII), null);

}


@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
private static class ParentClass {
@@ -67,6 +67,7 @@ public void canRead() {
assertThat(converter.canRead(MyBean.class, new MediaType("application", "json"))).isTrue();
assertThat(converter.canRead(Map.class, new MediaType("application", "json"))).isTrue();
assertThat(converter.canRead(MyBean.class, new MediaType("application", "json", StandardCharsets.UTF_8))).isTrue();
assertThat(converter.canRead(MyBean.class, new MediaType("application", "json", StandardCharsets.US_ASCII))).isTrue();
assertThat(converter.canRead(MyBean.class, new MediaType("application", "json", StandardCharsets.ISO_8859_1))).isTrue();
}

@@ -75,6 +76,7 @@ public void canWrite() {
assertThat(converter.canWrite(MyBean.class, new MediaType("application", "json"))).isTrue();
assertThat(converter.canWrite(Map.class, new MediaType("application", "json"))).isTrue();
assertThat(converter.canWrite(MyBean.class, new MediaType("application", "json", StandardCharsets.UTF_8))).isTrue();
assertThat(converter.canWrite(MyBean.class, new MediaType("application", "json", StandardCharsets.US_ASCII))).isTrue();
assertThat(converter.canWrite(MyBean.class, new MediaType("application", "json", StandardCharsets.ISO_8859_1))).isFalse();
}

@@ -460,6 +462,33 @@ public void readNonUnicode() throws Exception {
assertThat(result).containsExactly(entry("føø", "bår"));
}

@Test
@SuppressWarnings("unchecked")
public void readAscii() throws Exception {
String body = "{\"foo\":\"bar\"}";
Charset charset = StandardCharsets.US_ASCII;
MockHttpInputMessage inputMessage = new MockHttpInputMessage(body.getBytes(charset));
inputMessage.getHeaders().setContentType(new MediaType("application", "json", charset));
HashMap<String, Object> result = (HashMap<String, Object>) this.converter.read(HashMap.class, inputMessage);

assertThat(result).containsExactly(entry("foo", "bar"));
}

@Test
@SuppressWarnings("unchecked")
public void writeAscii() throws Exception {
MockHttpOutputMessage outputMessage = new MockHttpOutputMessage();
Map<String,Object> body = new HashMap<>();
body.put("foo", "bar");
Charset charset = StandardCharsets.US_ASCII;
MediaType contentType = new MediaType("application", "json", charset);
converter.write(body, contentType, outputMessage);

String result = outputMessage.getBodyAsString(charset);
assertThat(result).isEqualTo("{\"foo\":\"bar\"}");
assertThat(outputMessage.getHeaders().getContentType()).as("Invalid content-type").isEqualTo(contentType);
}


interface MyInterface {

