DataChunkInputStream deadlock protection removed #2401

Merged
7 changes: 7 additions & 0 deletions common/http/src/main/java/io/helidon/common/http/Content.java
@@ -16,7 +16,9 @@

package io.helidon.common.http;

import java.util.concurrent.Executor;
import java.util.concurrent.Flow;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;

@@ -128,6 +130,11 @@ public interface Content extends Multi<DataChunk> {
* The conversion requires an appropriate reader to be already registered
* (see {@link #registerReader(Predicate, Reader)}). If no such reader is found, the
* resulting completion stage ends exceptionally.
* <p>
* Callbacks on the returned value should not block; a blocking operation could cause a deadlock.
* If you need to use a blocking API such as {@link java.io.InputStream}, it is highly recommended to do so
* outside the scope of the reactive chain, or to use methods such as
* {@link java.util.concurrent.CompletionStage#thenAcceptAsync(Consumer, Executor)}.
*
* @param <T> the requested type
* @param type the requested type class
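To make the guidance in the new Javadoc above concrete, here is a minimal sketch (illustration only, not part of the change): the blocking read is shifted onto a dedicated executor via `thenAcceptAsync`, so it never runs inside the reactive chain. The `webClient` instance mirrors the tests further down; the single-thread executor is a hypothetical name introduced just for this example, and the `java.io`/`java.util.concurrent` imports are elided.

```java
// Sketch only: read the response InputStream on a separate executor so the
// blocking readAllBytes() call never runs on the reactive (Netty IO) thread.
ExecutorService executor = Executors.newSingleThreadExecutor();

webClient.get()
        .request(InputStream.class)
        .thenAcceptAsync(is -> {
            try (InputStream in = is) {
                byte[] bytes = in.readAllBytes();   // blocking, but we are on `executor` here
                System.out.println("Read " + bytes.length + " bytes");
            } catch (IOException e) {
                throw new UncheckedIOException(e);
            }
        }, executor)
        .toCompletableFuture()
        .join();                                    // wait for the background read to finish

executor.shutdown();
```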
@@ -38,8 +38,6 @@
public class DataChunkInputStream extends InputStream {
private static final Logger LOGGER = Logger.getLogger(DataChunkInputStream.class.getName());

private final String originalThreadID;
private final boolean validate;
private final Flow.Publisher<DataChunk> originalPublisher;
private int bufferIndex;
private CompletableFuture<DataChunk> current = new CompletableFuture<>();
@@ -74,8 +72,6 @@ public DataChunkInputStream(Flow.Publisher<DataChunk> originalPublisher) {
*/
public DataChunkInputStream(Flow.Publisher<DataChunk> originalPublisher, boolean validate) {
this.originalPublisher = originalPublisher;
this.originalThreadID = getCurrentThreadIdent();
this.validate = validate;
}

/**
@@ -120,7 +116,6 @@ public int read() throws IOException {

@Override
public int read(byte[] buf, int off, int len) throws IOException {
validate();
if (subscribed.compareAndSet(false, true)) {
originalPublisher.subscribe(new DataChunkSubscriber()); // subscribe for first time
}
@@ -180,17 +175,6 @@ public int read(byte[] buf, int off, int len) throws IOException {
}
}

private String getCurrentThreadIdent() {
Thread thread = Thread.currentThread();
return thread.getName() + ":" + thread.getId();
}

private void validate() {
if (validate && originalThreadID.equals(getCurrentThreadIdent())) {
throw new IllegalStateException("DataChunkInputStream needs to be handled in separate thread to prevent deadlock.");
}
}

// -- DataChunkSubscriber -------------------------------------------------
//
// Following methods are executed by Netty IO threads (except first chunk)
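With the `validate()` guard above removed, nothing stops a caller from blocking the publishing (Netty IO) thread at runtime anymore; the responsibility moves to the call site. A minimal illustrative fragment of the pattern the remaining tests rely on (method name and executor are hypothetical, imports elided):

```java
// Sketch only: run the blocking read on a dedicated executor instead of the
// thread delivering the chunks, which is what the removed validate() check
// used to enforce at runtime.
byte[] readOffPublisherThread(Flow.Publisher<DataChunk> publisher, ExecutorService executor)
        throws Exception {
    DataChunkInputStream stream = new DataChunkInputStream(publisher);
    return executor.submit(() -> {
        try (InputStream in = stream) {
            return in.readAllBytes();       // blocking, but not on the publishing thread
        }
    }).get();                               // wait for the background read to complete
}
```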
@@ -18,7 +18,6 @@
import java.io.IOException;
import java.io.InputStream;
import java.util.Collections;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;

import javax.json.Json;
@@ -34,7 +33,6 @@

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.fail;

/**
@@ -166,55 +164,6 @@ public void testRequestSpecificReader() throws Exception {
.get();
}

@Test
public void testInputStreamSameThread() {
ExecutionException exception = assertThrows(ExecutionException.class, () -> {
webClient.get()
.request(InputStream.class)
.thenApply(it -> {
try {
it.readAllBytes();
} catch (IOException ignored) {
}
fail("This should have failed!");
return CompletableFuture.completedFuture(it);
})
.toCompletableFuture()
.get();
});
if (exception.getCause() instanceof IllegalStateException) {
assertThat(exception.getCause().getMessage(),
is("DataChunkInputStream needs to be handled in separate thread to prevent deadlock."));
} else {
fail(exception);
}
}

@Test
public void testInputStreamSameThreadTestContentAs() {
ExecutionException exception = assertThrows(ExecutionException.class, () -> {
webClient.get()
.request()
.thenCompose(it -> it.content().as(InputStream.class))
.thenApply(it -> {
try {
it.readAllBytes();
} catch (IOException ignored) {
}
fail("This should have failed!");
return CompletableFuture.completedFuture(it);
})
.toCompletableFuture()
.get();
});
if (exception.getCause() instanceof IllegalStateException) {
assertThat(exception.getCause().getMessage(),
is("DataChunkInputStream needs to be handled in separate thread to prevent deadlock."));
} else {
fail(exception);
}
}

@Test
public void testInputStreamDifferentThread() throws IOException, ExecutionException, InterruptedException {
InputStream is = webClient.get()