Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Replace Jackson library by simple data I/O streams #8693

Merged
merged 4 commits into from
Jan 8, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 1 addition & 2 deletions build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -1557,7 +1557,7 @@ lazy val runtime = (project in file("engine/runtime"))
), // show timings for individual tests
scalacOptions += "-Ymacro-annotations",
scalacOptions ++= Seq("-Ypatmat-exhaust-depth", "off"),
libraryDependencies ++= jmh ++ jaxb ++ circe ++ GraalVM.langsPkgs ++ Seq(
libraryDependencies ++= jmh ++ jaxb ++ GraalVM.langsPkgs ++ Seq(
"org.apache.commons" % "commons-lang3" % commonsLangVersion,
"org.apache.tika" % "tika-core" % tikaVersion,
"org.graalvm.polyglot" % "polyglot" % graalMavenPackagesVersion % "provided",
Expand All @@ -1571,7 +1571,6 @@ lazy val runtime = (project in file("engine/runtime"))
"org.scalactic" %% "scalactic" % scalacticVersion % Test,
"org.scalatest" %% "scalatest" % scalatestVersion % Test,
"org.graalvm.truffle" % "truffle-api" % graalMavenPackagesVersion % Benchmark,
"org.typelevel" %% "cats-core" % catsVersion,
"junit" % "junit" % junitVersion % Test,
"com.github.sbt" % "junit-interface" % junitIfVersion % Test,
"org.hamcrest" % "hamcrest-all" % hamcrestVersion % Test,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,6 @@
import com.oracle.truffle.api.TruffleLogger;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
Expand Down Expand Up @@ -175,8 +173,10 @@ private boolean ensureRoot(TruffleFile cacheRoot) {
* @param blobDigest digest of serialized data
* @param entry data to serialize
* @return raw bytes representing serialized metadata
* @throws java.io.IOException in case of I/O error
*/
protected abstract byte[] metadata(String sourceDigest, String blobDigest, T entry);
protected abstract byte[] metadata(String sourceDigest, String blobDigest, T entry)
throws IOException;

/**
* Loads cache for this data, if possible.
Expand Down Expand Up @@ -333,9 +333,12 @@ private Optional<M> loadCacheMetadata(TruffleFile path, TruffleLogger logger) th
* De-serializes raw bytes to data's metadata.
*
* @param bytes raw bytes representing metadata
* @param logger logger to use
* @return non-empty metadata, if de-serialization was successful
* @throws IOException in case of I/O error
*/
protected abstract Optional<M> metadataFromBytes(byte[] bytes, TruffleLogger logger);
protected abstract Optional<M> metadataFromBytes(byte[] bytes, TruffleLogger logger)
throws IOException;

/**
* Compute digest of cache's data
Expand Down Expand Up @@ -509,8 +512,6 @@ public final void invalidate(EnsoContext context) {
}
}

protected static final Charset metadataCharset = StandardCharsets.UTF_8;

/**
* Roots encapsulates two possible locations where caches can be stored.
*
Expand Down
Original file line number Diff line number Diff line change
@@ -1,11 +1,12 @@
package org.enso.interpreter.caches;

import buildinfo.Info;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.oracle.truffle.api.TruffleFile;
import com.oracle.truffle.api.TruffleLogger;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.List;
import java.util.Optional;
Expand Down Expand Up @@ -40,14 +41,9 @@ public ImportExportCache(LibraryName libraryName) {
}

@Override
protected byte[] metadata(String sourceDigest, String blobDigest, CachedBindings entry) {
try {
return objectMapper
.writeValueAsString(new Metadata(sourceDigest, blobDigest))
.getBytes(metadataCharset);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
// Encodes the two digests into the compact binary form defined by Metadata.toBytes().
// The CachedBindings entry contributes nothing here; only the digests are persisted.
// I/O failures now propagate as IOException instead of being wrapped in RuntimeException.
protected byte[] metadata(String sourceDigest, String blobDigest, CachedBindings entry)
throws IOException {
return new Metadata(sourceDigest, blobDigest).toBytes();
}

@Override
Expand All @@ -60,15 +56,9 @@ protected CachedBindings deserialize(
}

@Override
protected Optional<Metadata> metadataFromBytes(byte[] bytes, TruffleLogger logger) {
var maybeJsonString = new String(bytes, Cache.metadataCharset);
var mapper = new ObjectMapper();
try {
return Optional.of(objectMapper.readValue(maybeJsonString, ImportExportCache.Metadata.class));
} catch (JsonProcessingException e) {
logger.log(logLevel, "Failed to deserialize library's metadata.", e);
return Optional.empty();
}
// Decodes metadata previously written by Metadata.toBytes(). The logger parameter is
// unused since the Jackson removal: malformed input now surfaces as an IOException
// from Metadata.read rather than being logged and mapped to Optional.empty().
protected Optional<Metadata> metadataFromBytes(byte[] bytes, TruffleLogger logger)
throws IOException {
return Optional.of(Metadata.read(bytes));
}

@Override
Expand Down Expand Up @@ -172,16 +162,28 @@ public static record CachedBindings(
MapToBindings bindings,
Optional<List<SourceFile<TruffleFile>>> sources) {}

public record Metadata(
@JsonProperty("source_hash") String sourceHash, @JsonProperty("blob_hash") String blobHash)
implements Cache.Metadata {}
/**
 * Metadata persisted next to a bindings cache blob: the digest of the sources and the
 * digest of the serialized blob, encoded as two modified-UTF-8 strings via data streams.
 */
public record Metadata(String sourceHash, String blobHash) implements Cache.Metadata {
  /** Encodes this metadata: source hash first, then blob hash. */
  byte[] toBytes() throws IOException {
    var buffer = new ByteArrayOutputStream();
    try (var data = new DataOutputStream(buffer)) {
      data.writeUTF(sourceHash);
      data.writeUTF(blobHash);
    }
    return buffer.toByteArray();
  }

  /** Decodes metadata previously produced by {@link #toBytes()}. */
  static Metadata read(byte[] arr) throws IOException {
    try (var data = new DataInputStream(new ByteArrayInputStream(arr))) {
      var source = data.readUTF();
      var blob = data.readUTF();
      return new Metadata(source, blob);
    }
  }
}

private static final String bindingsCacheDataExtension = ".bindings";

private static final String bindingsCacheMetadataExtension = ".bindings.meta";

private static final ObjectMapper objectMapper = new ObjectMapper();

@Persistable(clazz = BindingsMap.PolyglotSymbol.class, id = 33006)
@Persistable(
clazz = org.enso.compiler.data.BindingsMap$ModuleReference$Abstract.class,
Expand Down
Original file line number Diff line number Diff line change
@@ -1,12 +1,14 @@
package org.enso.interpreter.caches;

import buildinfo.Info;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.oracle.truffle.api.TruffleLogger;
import com.oracle.truffle.api.source.Source;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Optional;
Expand All @@ -33,13 +35,9 @@ public ModuleCache(org.enso.interpreter.runtime.Module module) {
}

@Override
protected byte[] metadata(String sourceDigest, String blobDigest, CachedModule entry) {
try {
return objectMapper.writeValueAsBytes(
new Metadata(sourceDigest, blobDigest, entry.compilationStage().toString()));
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
// Encodes the digests plus the module's compilation stage (as its enum name string)
// into the binary form defined by Metadata.toBytes(). I/O failures propagate as
// IOException instead of being wrapped in RuntimeException.
protected byte[] metadata(String sourceDigest, String blobDigest, CachedModule entry)
throws IOException {
return new Metadata(sourceDigest, blobDigest, entry.compilationStage().toString()).toBytes();
}

@Override
Expand Down Expand Up @@ -69,14 +67,9 @@ protected CachedModule deserialize(
}

@Override
protected Optional<Metadata> metadataFromBytes(byte[] bytes, TruffleLogger logger) {
var maybeJsonString = new String(bytes, Cache.metadataCharset);
try {
return Optional.of(objectMapper.readValue(maybeJsonString, Metadata.class));
} catch (JsonProcessingException e) {
logger.log(logLevel, "Failed to deserialize module's metadata.", e);
return Optional.empty();
}
// Decodes metadata previously written by Metadata.toBytes(). The logger parameter is
// unused since the Jackson removal: malformed input now surfaces as an IOException
// from Metadata.read rather than being logged and mapped to Optional.empty().
protected Optional<Metadata> metadataFromBytes(byte[] bytes, TruffleLogger logger)
throws IOException {
return Optional.of(Metadata.read(bytes));
}

private Optional<String> computeDigestOfModuleSources(Source source) {
Expand All @@ -85,7 +78,7 @@ private Optional<String> computeDigestOfModuleSources(Source source) {
if (source.hasBytes()) {
sourceBytes = source.getBytes().toByteArray();
} else {
sourceBytes = source.getCharacters().toString().getBytes(metadataCharset);
sourceBytes = source.getCharacters().toString().getBytes(StandardCharsets.UTF_8);
}
return Optional.of(computeDigestFromBytes(sourceBytes));
} else {
Expand Down Expand Up @@ -171,18 +164,30 @@ protected byte[] serialize(EnsoContext context, CachedModule entry) throws IOExc

public record CachedModule(Module moduleIR, CompilationStage compilationStage, Source source) {}

public record Metadata(
@JsonProperty("source_hash") String sourceHash,
@JsonProperty("blob_hash") String blobHash,
@JsonProperty("compilation_stage") String compilationStage)
implements Cache.Metadata {}
/**
 * Metadata persisted next to a module cache blob: source digest, blob digest, and the
 * compilation stage name, encoded as three modified-UTF-8 strings via data streams.
 */
public record Metadata(String sourceHash, String blobHash, String compilationStage)
    implements Cache.Metadata {
  /** Encodes this metadata: source hash, blob hash, then compilation stage. */
  byte[] toBytes() throws IOException {
    var buffer = new ByteArrayOutputStream();
    try (var data = new DataOutputStream(buffer)) {
      data.writeUTF(sourceHash);
      data.writeUTF(blobHash);
      data.writeUTF(compilationStage);
    }
    return buffer.toByteArray();
  }

  /** Decodes metadata previously produced by {@link #toBytes()}. */
  static Metadata read(byte[] arr) throws IOException {
    try (var data = new DataInputStream(new ByteArrayInputStream(arr))) {
      var source = data.readUTF();
      var blob = data.readUTF();
      var stage = data.readUTF();
      return new Metadata(source, blob, stage);
    }
  }
}

private static final String irCacheDataExtension = ".ir";

private static final String irCacheMetadataExtension = ".meta";

private static final ObjectMapper objectMapper = new ObjectMapper();

@SuppressWarnings("unchecked")
private static <T extends Exception> T raise(Class<T> cls, Exception e) throws T {
throw (T) e;
Expand Down
Original file line number Diff line number Diff line change
@@ -1,13 +1,12 @@
package org.enso.interpreter.caches;

import buildinfo.Info;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.oracle.truffle.api.TruffleFile;
import com.oracle.truffle.api.TruffleLogger;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
Expand All @@ -27,8 +26,6 @@ public final class SuggestionsCache
private static final String SUGGESTIONS_CACHE_DATA_EXTENSION = ".suggestions";
private static final String SUGGESTIONS_CACHE_METADATA_EXTENSION = ".suggestions.meta";

private static final ObjectMapper objectMapper = new ObjectMapper();

final LibraryName libraryName;

public SuggestionsCache(LibraryName libraryName) {
Expand All @@ -40,14 +37,9 @@ public SuggestionsCache(LibraryName libraryName) {
}

@Override
protected byte[] metadata(String sourceDigest, String blobDigest, CachedSuggestions entry) {
try {
return objectMapper
.writeValueAsString(new Metadata(sourceDigest, blobDigest))
.getBytes(metadataCharset);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
// Encodes the two digests into the binary form defined by Metadata.toBytes().
// The CachedSuggestions entry contributes nothing here; only the digests are persisted.
// I/O failures propagate as IOException instead of being wrapped in RuntimeException.
protected byte[] metadata(String sourceDigest, String blobDigest, CachedSuggestions entry)
throws IOException {
return new Metadata(sourceDigest, blobDigest).toBytes();
}

@Override
Expand All @@ -65,14 +57,9 @@ protected CachedSuggestions deserialize(
}

@Override
protected Optional<Metadata> metadataFromBytes(byte[] bytes, TruffleLogger logger) {
var maybeJsonString = new String(bytes, Cache.metadataCharset);
try {
return Optional.of(objectMapper.readValue(maybeJsonString, SuggestionsCache.Metadata.class));
} catch (JsonProcessingException e) {
logger.log(logLevel, "Failed to deserialize suggestions' metadata.", e);
return Optional.empty();
}
// Decodes metadata previously written by Metadata.toBytes(). The logger parameter is
// unused since the Jackson removal: malformed input now surfaces as an IOException
// from Metadata.read rather than being logged and mapped to Optional.empty().
protected Optional<Metadata> metadataFromBytes(byte[] bytes, TruffleLogger logger)
throws IOException {
return Optional.of(Metadata.read(bytes));
}

@Override
Expand Down Expand Up @@ -173,7 +160,21 @@ public List<Suggestion> getSuggestions() {
}
}

record Metadata(
@JsonProperty("source_hash") String sourceHash, @JsonProperty("blob_hash") String blobHash)
implements Cache.Metadata {}
/**
 * Metadata persisted next to a suggestions cache blob: the digest of the sources and the
 * digest of the serialized blob, encoded as two modified-UTF-8 strings via data streams.
 */
record Metadata(String sourceHash, String blobHash) implements Cache.Metadata {
  /** Encodes this metadata: source hash first, then blob hash. */
  byte[] toBytes() throws IOException {
    var buffer = new ByteArrayOutputStream();
    try (var data = new DataOutputStream(buffer)) {
      data.writeUTF(sourceHash);
      data.writeUTF(blobHash);
    }
    return buffer.toByteArray();
  }

  /** Decodes metadata previously produced by {@link #toBytes()}. */
  static Metadata read(byte[] arr) throws IOException {
    try (var data = new DataInputStream(new ByteArrayInputStream(arr))) {
      var source = data.readUTF();
      var blob = data.readUTF();
      return new Metadata(source, blob);
    }
  }
}
}
Loading