Skip to content

Commit

Permalink
Merge branch '__rultor'
Browse files Browse the repository at this point in the history
  • Loading branch information
rultor committed May 8, 2023
2 parents 53a3b3b + a2feef7 commit 6eacdd6
Show file tree
Hide file tree
Showing 8 changed files with 84 additions and 106 deletions.
109 changes: 42 additions & 67 deletions src/it/producer-consumer-api/src/test/java/EntryTest.java
Original file line number Diff line number Diff line change
Expand Up @@ -23,42 +23,32 @@
*/

import io.github.eocqrs.kafka.Consumer;
import io.github.eocqrs.kafka.Dataized;
import io.github.eocqrs.kafka.Producer;
import io.github.eocqrs.kafka.consumer.KfConsumer;
import io.github.eocqrs.kafka.consumer.settings.KfConsumerParams;
import io.github.eocqrs.kafka.data.KfData;
import io.github.eocqrs.kafka.parameters.AutoOffsetReset;
import io.github.eocqrs.kafka.parameters.BootstrapServers;
import io.github.eocqrs.kafka.parameters.ClientId;
import io.github.eocqrs.kafka.parameters.GroupId;
import io.github.eocqrs.kafka.parameters.KeyDeserializer;
import io.github.eocqrs.kafka.parameters.KeySerializer;
import io.github.eocqrs.kafka.parameters.KfFlexible;
import io.github.eocqrs.kafka.parameters.KfParams;
import io.github.eocqrs.kafka.parameters.Retries;
import io.github.eocqrs.kafka.parameters.ValueDeserializer;
import io.github.eocqrs.kafka.parameters.ValueSerializer;
import io.github.eocqrs.kafka.producer.KfCallback;
import io.github.eocqrs.kafka.producer.KfProducer;
import io.github.eocqrs.kafka.producer.settings.KfProducerParams;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.cactoos.list.ListOf;
import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.MethodOrderer;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
Expand All @@ -67,34 +57,24 @@
import org.slf4j.LoggerFactory;
import org.testcontainers.containers.KafkaContainer;
import org.testcontainers.containers.output.Slf4jLogConsumer;
import org.testcontainers.shaded.com.google.common.collect.ImmutableMap;
import org.testcontainers.utility.DockerImageName;

import java.io.IOException;
import java.time.Duration;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;

import static org.assertj.core.api.Assertions.*;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.tuple;
/**
* @todo #81 Tests to produce-consume data.
* Write a test which will be check how consumer
* reads data from producer.
*/

/**
* @todo #236:30m/DEV Enable tests
* @todo #290:30m/DEV Split ITCases into multiple files
*/

/**
* Entry test cases.
*
* @author Ivan Ivanchuk (l3r8y@duck.com)
* @author Aliaksei Bialiauski (abialiauski.dev@gmail.com)
* @since 0.0.2
*/
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
Expand All @@ -103,8 +83,8 @@ final class EntryTest {
private static final KafkaContainer KAFKA = new KafkaContainer(
DockerImageName.parse("confluentinc/cp-kafka:7.3.0")
)
.withEnv("auto.create.topics.enable", "true")
.withEnv("KAFKA_CREATE_TOPICS", "TEST-TOPIC")
.withEnv("auto.create.topics.enable", "true")
.withReuse(true)
.withLogConsumer(
new Slf4jLogConsumer(
Expand All @@ -113,7 +93,7 @@ final class EntryTest {
)
)
)
.withExternalZookeeper("localhost:2181");
.withEmbeddedZookeeper();

private static String servers;

Expand Down Expand Up @@ -146,7 +126,6 @@ void runsKafka() {
);
}

@Disabled
@Test
@Order(2)
void createsConsumerAndSubscribes() throws IOException {
Expand All @@ -165,11 +144,14 @@ void createsConsumerAndSubscribes() throws IOException {
)
)
) {
Assertions.assertDoesNotThrow(() -> consumer.subscribe(new ListOf<>("fake")));
Assertions.assertDoesNotThrow(
() -> consumer.subscribe(
"TEST-TOPIC"
)
);
}
}

@Disabled
@Test
@Order(3)
void createsProducerAndSendsData() throws IOException {
Expand All @@ -190,54 +172,48 @@ void createsProducerAndSendsData() throws IOException {
Assertions.assertDoesNotThrow(
() -> producer.send(
"fake-key",
new KfData<>("fake-data", "TEST-TOPIC", 1)
new KfData<>("fake-data", "FAKE-TOPIC", 1)
)
);
}
}

@Test
@Order(5)
void createsProducerAndSendsMessage() throws Exception {
final AdminClient admin = AdminClient.create(
ImmutableMap.of(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, EntryTest.servers)
);
final KafkaProducer<String, String> producer = new KafkaProducer<>(
ImmutableMap.of(
ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,
EntryTest.servers,
ProducerConfig.CLIENT_ID_CONFIG,
UUID.randomUUID().toString()
),
new StringSerializer(),
new StringSerializer()
);
final KafkaConsumer<String, String> consumer = new KafkaConsumer<>(
ImmutableMap.of(
ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,
EntryTest.servers,
ConsumerConfig.GROUP_ID_CONFIG,
"tc-" + UUID.randomUUID(),
ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,
"earliest"
),
new StringDeserializer(),
new StringDeserializer()
void createsProducerAndSendsMessage() {
final Producer<String, String> producer = new KfProducer<>(
new KfFlexible<>(
new KfProducerParams(
new KfParams(
new BootstrapServers(EntryTest.servers),
new ClientId(UUID.randomUUID().toString()),
new KeySerializer(StringSerializer.class.getName()),
new ValueSerializer(StringSerializer.class.getName())
)
)
)
);
final Collection<NewTopic> topics =
Collections.singletonList(
new NewTopic("TEST-TOPIC", 1, (short) 1)
final Consumer<String, String> consumer =
new KfConsumer<>(
new KfFlexible<>(
new KfConsumerParams(
new KfParams(
new BootstrapServers(EntryTest.servers),
new GroupId("it-" + UUID.randomUUID()),
new AutoOffsetReset("earliest"),
new KeyDeserializer(StringDeserializer.class.getName()),
new ValueDeserializer(StringDeserializer.class.getName())
)
)
)
);
admin.createTopics(topics)
.all().get(30L, TimeUnit.SECONDS);
consumer.subscribe(Collections.singletonList("TEST-TOPIC"));
producer.send(new ProducerRecord<>("TEST-TOPIC", "testcontainers", "rulezzz")).get();
producer.send("testcontainers", new KfData<>("rulezzz", "TEST-TOPIC", 0));
Unreliables.retryUntilTrue(
10,
TimeUnit.SECONDS,
() -> {
final ConsumerRecords<String, String> records =
consumer.poll(Duration.ofMillis(100L));
consumer.records("TEST-TOPIC", Duration.ofMillis(100L));
if (records.isEmpty()) {
return false;
}
Expand All @@ -251,7 +227,6 @@ void createsProducerAndSendsMessage() throws Exception {
consumer.unsubscribe();
}

@Disabled
@Test
@Order(4)
void createsProducerWithCallback() throws IOException {
Expand All @@ -270,13 +245,13 @@ void createsProducerWithCallback() throws IOException {
(recordMetadata, e) ->
MatcherAssert.assertThat(
recordMetadata.topic(),
Matchers.equalTo("TEST-TOPIC")
Matchers.equalTo("TEST-CALLBACK")
)
)
) {
producer.send(
"test-key",
new KfData<>("test-data", "TEST-TOPIC", 1)
new KfData<>("test-data", "TEST-CALLBACK", 1)
);
}
}
Expand Down
12 changes: 6 additions & 6 deletions src/main/java/io/github/eocqrs/kafka/Consumer.java
Original file line number Diff line number Diff line change
Expand Up @@ -22,12 +22,12 @@

package io.github.eocqrs.kafka;

import org.apache.kafka.clients.consumer.ConsumerRebalanceListener;
import org.apache.kafka.clients.consumer.ConsumerRecords;

import java.io.Closeable;
import java.time.Duration;
import java.util.Collection;
import java.util.List;

import org.apache.kafka.clients.consumer.ConsumerRebalanceListener;

/**
* Consumer.
Expand Down Expand Up @@ -63,13 +63,13 @@ public interface Consumer<K, X> extends Closeable {
void subscribe(ConsumerRebalanceListener listener, String... topics);

/**
* Dataized.
* Fetch Records.
*
* @param topic topic to poll
* @param timeout max time to wait
* @return Dataized polled data.
* @return Records.
*/
List<Dataized<X>> iterate(String topic, Duration timeout);
ConsumerRecords<K, X> records(String topic, Duration timeout);

/**
* Unsubscribe.
Expand Down
2 changes: 1 addition & 1 deletion src/main/java/io/github/eocqrs/kafka/Producer.java
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
import java.util.concurrent.Future;
import org.apache.kafka.clients.producer.RecordMetadata;
/**
* @todo #236:30m/DEV Producer send data without partition
* @todo #287:30m/DEV Producer send is not flexible enough
*/

/**
Expand Down
32 changes: 8 additions & 24 deletions src/main/java/io/github/eocqrs/kafka/consumer/KfConsumer.java
Original file line number Diff line number Diff line change
Expand Up @@ -24,19 +24,13 @@

import io.github.eocqrs.kafka.Consumer;
import io.github.eocqrs.kafka.ConsumerSettings;
import io.github.eocqrs.kafka.Dataized;
import io.github.eocqrs.kafka.data.KfData;
import org.apache.kafka.clients.consumer.ConsumerRebalanceListener;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.cactoos.list.ListOf;

import java.time.Duration;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
* @todo #236:30m/DEV Unsubscribe is not implemented
*/

/**
* Kafka Consumer.
Expand Down Expand Up @@ -89,25 +83,15 @@ public void subscribe(final ConsumerRebalanceListener listener,
}

/**
* @todo #236:30m/DEV ConsumerRecords data polling
* @todo #289:30m/DEV ConsumerRecords wrapping up
* we have to wrap the ConsumerRecords into some object
*/
@Override
public List<Dataized<X>> iterate(final String topic, final Duration timeout) {
final List<Dataized<X>> accumulator = new ArrayList<>(0);
this.origin
.poll(timeout)
.records(topic)
.forEach(
data ->
accumulator.add(
new KfData<>(
data.value(),
topic,
data.partition()
).dataized()
)
);
return accumulator;
public ConsumerRecords<K, X> records(
final String topic, final Duration timeout
) {
this.subscribe(topic);
return this.origin.poll(timeout);
}

@Override
Expand Down
13 changes: 7 additions & 6 deletions src/main/java/io/github/eocqrs/kafka/fake/FkConsumer.java
Original file line number Diff line number Diff line change
Expand Up @@ -23,12 +23,11 @@
package io.github.eocqrs.kafka.fake;

import io.github.eocqrs.kafka.Consumer;
import io.github.eocqrs.kafka.Dataized;
import org.apache.kafka.clients.consumer.ConsumerRebalanceListener;
import org.apache.kafka.clients.consumer.ConsumerRecords;

import java.time.Duration;
import java.util.Collection;
import java.util.List;
import org.apache.kafka.clients.consumer.ConsumerRebalanceListener;

/**
* Fake Consumer.
Expand Down Expand Up @@ -64,11 +63,13 @@ public void subscribe(final ConsumerRebalanceListener listener,
}

/*
* @todo #54:60m/DEV Fake iterate is not implemented
* @todo #54:60m/DEV Fake records is not implemented
*/
@Override
public List<Dataized<X>> iterate(final String topic, final Duration timeout) {
throw new UnsupportedOperationException("#iterate()");
public ConsumerRecords<K, X> records(
final String topic, final Duration timeout
) {
throw new UnsupportedOperationException("#records()");
}

/*
Expand Down
2 changes: 2 additions & 0 deletions src/main/java/io/github/eocqrs/kafka/producer/KfProducer.java
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,9 @@
import io.github.eocqrs.kafka.Data;
import io.github.eocqrs.kafka.Producer;
import io.github.eocqrs.kafka.ProducerSettings;

import java.util.concurrent.Future;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
Expand Down
Loading

6 comments on commit 6eacdd6

@0pdd
Copy link
Collaborator

@0pdd 0pdd commented on 6eacdd6 May 8, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Puzzle 54-18f70e3d disappeared from src/main/java/io/github/eocqrs/kafka/fake/FkConsumer.java, that's why I closed #296. Please, remember that the puzzle was not necessarily removed in this particular commit. Maybe it happened earlier, but we discovered this fact only now.

@0pdd
Copy link
Collaborator

@0pdd 0pdd commented on 6eacdd6 May 8, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Puzzle 81-60413ccb disappeared from src/it/producer-consumer-api/src/test/java/EntryTest.java, that's why I closed #236. Please, remember that the puzzle was not necessarily removed in this particular commit. Maybe it happened earlier, but we discovered this fact only now.

@0pdd
Copy link
Collaborator

@0pdd 0pdd commented on 6eacdd6 May 8, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Puzzle 236-9498fb84 disappeared from src/main/java/io/github/eocqrs/kafka/Producer.java, that's why I closed #287. Please, remember that the puzzle was not necessarily removed in this particular commit. Maybe it happened earlier, but we discovered this fact only now.

@0pdd
Copy link
Collaborator

@0pdd 0pdd commented on 6eacdd6 May 8, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Puzzle 236-63d4f363 disappeared from src/main/java/io/github/eocqrs/kafka/consumer/KfConsumer.java, that's why I closed #288. Please, remember that the puzzle was not necessarily removed in this particular commit. Maybe it happened earlier, but we discovered this fact only now.

@0pdd
Copy link
Collaborator

@0pdd 0pdd commented on 6eacdd6 May 8, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Puzzle 236-0a53974e disappeared from src/main/java/io/github/eocqrs/kafka/consumer/KfConsumer.java, that's why I closed #289. Please, remember that the puzzle was not necessarily removed in this particular commit. Maybe it happened earlier, but we discovered this fact only now.

@0pdd
Copy link
Collaborator

@0pdd 0pdd commented on 6eacdd6 May 8, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I wasn't able to retrieve PDD puzzles from the code base and submit them to github. If you think that it's a bug on our side, please submit it to yegor256/0pdd:

POST https://api.github.com/repos/eo-cqrs/eo-kafka/commits/6eacdd6d52f574ab87eb2502b70cc15477092b5f/comments: 502 - Server Error

Please, copy and paste this stack trace to GitHub:

Octokit::BadGateway
POST https://api.github.com/repos/eo-cqrs/eo-kafka/commits/6eacdd6d52f574ab87eb2502b70cc15477092b5f/comments: 502 - Server Error
/app/vendor/bundle/ruby/2.7.0/gems/octokit-5.6.1/lib/octokit/response/raise_error.rb:14:in `on_complete'
/app/vendor/bundle/ruby/2.7.0/gems/faraday-2.6.0/lib/faraday/middleware.rb:18:in `block in call'
/app/vendor/bundle/ruby/2.7.0/gems/faraday-2.6.0/lib/faraday/response.rb:42:in `on_complete'
/app/vendor/bundle/ruby/2.7.0/gems/faraday-2.6.0/lib/faraday/middleware.rb:17:in `call'
/app/vendor/bundle/ruby/2.7.0/gems/octokit-5.6.1/lib/octokit/middleware/follow_redirects.rb:73:in `perform_with_redirection'
/app/vendor/bundle/ruby/2.7.0/gems/octokit-5.6.1/lib/octokit/middleware/follow_redirects.rb:61:in `call'
/app/vendor/bundle/ruby/2.7.0/gems/faraday-2.6.0/lib/faraday/rack_builder.rb:153:in `build_response'
/app/vendor/bundle/ruby/2.7.0/gems/faraday-2.6.0/lib/faraday/connection.rb:445:in `run_request'
/app/vendor/bundle/ruby/2.7.0/gems/faraday-2.6.0/lib/faraday/connection.rb:281:in `post'
/app/vendor/bundle/ruby/2.7.0/gems/sawyer-0.9.2/lib/sawyer/agent.rb:99:in `call'
/app/vendor/bundle/ruby/2.7.0/gems/octokit-5.6.1/lib/octokit/connection.rb:156:in `request'
/app/vendor/bundle/ruby/2.7.0/gems/octokit-5.6.1/lib/octokit/connection.rb:28:in `post'
/app/vendor/bundle/ruby/2.7.0/gems/octokit-5.6.1/lib/octokit/client/commit_comments.rb:63:in `create_commit_comment'
/app/objects/vcs/github.rb:104:in `create_commit_comment'
/app/objects/tickets/commit_tickets.rb:53:in `close'
/app/objects/tickets/emailed_tickets.rb:64:in `close'
/app/objects/tickets/sentry_tickets.rb:56:in `close'
/app/objects/puzzles.rb:93:in `block in expose'
/app/objects/puzzles.rb:82:in `loop'
/app/objects/puzzles.rb:82:in `expose'
/app/objects/puzzles.rb:44:in `deploy'
/app/objects/jobs/job.rb:38:in `proceed'
/app/objects/jobs/job_starred.rb:32:in `proceed'
/app/objects/jobs/job_recorded.rb:31:in `proceed'
/app/objects/jobs/job_emailed.rb:33:in `proceed'
/app/objects/jobs/job_commiterrors.rb:33:in `proceed'
/app/objects/jobs/job_detached.rb:48:in `exclusive'
/app/objects/jobs/job_detached.rb:36:in `block in proceed'
/app/objects/jobs/job_detached.rb:36:in `fork'
/app/objects/jobs/job_detached.rb:36:in `proceed'
/app/0pdd.rb:530:in `process_request'
/app/0pdd.rb:367:in `block in <top (required)>'
/app/vendor/bundle/ruby/2.7.0/gems/sinatra-2.2.2/lib/sinatra/base.rb:1686:in `call'
/app/vendor/bundle/ruby/2.7.0/gems/sinatra-2.2.2/lib/sinatra/base.rb:1686:in `block in compile!'
/app/vendor/bundle/ruby/2.7.0/gems/sinatra-2.2.2/lib/sinatra/base.rb:1023:in `block (3 levels) in route!'
/app/vendor/bundle/ruby/2.7.0/gems/sinatra-2.2.2/lib/sinatra/base.rb:1042:in `route_eval'
/app/vendor/bundle/ruby/2.7.0/gems/sinatra-2.2.2/lib/sinatra/base.rb:1023:in `block (2 levels) in route!'
/app/vendor/bundle/ruby/2.7.0/gems/sinatra-2.2.2/lib/sinatra/base.rb:1071:in `block in process_route'
/app/vendor/bundle/ruby/2.7.0/gems/sinatra-2.2.2/lib/sinatra/base.rb:1069:in `catch'
/app/vendor/bundle/ruby/2.7.0/gems/sinatra-2.2.2/lib/sinatra/base.rb:1069:in `process_route'
/app/vendor/bundle/ruby/2.7.0/gems/sinatra-2.2.2/lib/sinatra/base.rb:1021:in `block in route!'
/app/vendor/bundle/ruby/2.7.0/gems/sinatra-2.2.2/lib/sinatra/base.rb:1018:in `each'
/app/vendor/bundle/ruby/2.7.0/gems/sinatra-2.2.2/lib/sinatra/base.rb:1018:in `route!'
/app/vendor/bundle/ruby/2.7.0/gems/sinatra-2.2.2/lib/sinatra/base.rb:1140:in `block in dispatch!'
/app/vendor/bundle/ruby/2.7.0/gems/sinatra-2.2.2/lib/sinatra/base.rb:1112:in `block in invoke'
/app/vendor/bundle/ruby/2.7.0/gems/sinatra-2.2.2/lib/sinatra/base.rb:1112:in `catch'
/app/vendor/bundle/ruby/2.7.0/gems/sinatra-2.2.2/lib/sinatra/base.rb:1112:in `invoke'
/app/vendor/bundle/ruby/2.7.0/gems/sinatra-2.2.2/lib/sinatra/base.rb:1135:in `dispatch!'
/app/vendor/bundle/ruby/2.7.0/gems/sinatra-2.2.2/lib/sinatra/base.rb:949:in `block in call!'
/app/vendor/bundle/ruby/2.7.0/gems/sinatra-2.2.2/lib/sinatra/base.rb:1112:in `block in invoke'
/app/vendor/bundle/ruby/2.7.0/gems/sinatra-2.2.2/lib/sinatra/base.rb:1112:in `catch'
/app/vendor/bundle/ruby/2.7.0/gems/sinatra-2.2.2/lib/sinatra/base.rb:1112:in `invoke'
/app/vendor/bundle/ruby/2.7.0/gems/sinatra-2.2.2/lib/sinatra/base.rb:949:in `call!'
/app/vendor/bundle/ruby/2.7.0/gems/sinatra-2.2.2/lib/sinatra/base.rb:938:in `call'
/app/vendor/bundle/ruby/2.7.0/gems/rack-2.2.4/lib/rack/deflater.rb:44:in `call'
/app/vendor/bundle/ruby/2.7.0/gems/rack-protection-2.2.2/lib/rack/protection/xss_header.rb:18:in `call'
/app/vendor/bundle/ruby/2.7.0/gems/rack-protection-2.2.2/lib/rack/protection/path_traversal.rb:16:in `call'
/app/vendor/bundle/ruby/2.7.0/gems/rack-protection-2.2.2/lib/rack/protection/json_csrf.rb:26:in `call'
/app/vendor/bundle/ruby/2.7.0/gems/rack-protection-2.2.2/lib/rack/protection/base.rb:50:in `call'
/app/vendor/bundle/ruby/2.7.0/gems/rack-protection-2.2.2/lib/rack/protection/base.rb:50:in `call'
/app/vendor/bundle/ruby/2.7.0/gems/rack-protection-2.2.2/lib/rack/protection/frame_options.rb:31:in `call'
/app/vendor/bundle/ruby/2.7.0/gems/rack-2.2.4/lib/rack/logger.rb:17:in `call'
/app/vendor/bundle/ruby/2.7.0/gems/rack-2.2.4/lib/rack/common_logger.rb:38:in `call'
/app/vendor/bundle/ruby/2.7.0/gems/sinatra-2.2.2/lib/sinatra/base.rb:255:in `call'
/app/vendor/bundle/ruby/2.7.0/gems/sinatra-2.2.2/lib/sinatra/base.rb:248:in `call'
/app/vendor/bundle/ruby/2.7.0/gems/rack-2.2.4/lib/rack/head.rb:12:in `call'
/app/vendor/bundle/ruby/2.7.0/gems/rack-2.2.4/lib/rack/method_override.rb:24:in `call'
/app/vendor/bundle/ruby/2.7.0/gems/sinatra-2.2.2/lib/sinatra/base.rb:218:in `call'
/app/vendor/bundle/ruby/2.7.0/gems/sinatra-2.2.2/lib/sinatra/base.rb:1993:in `call'
/app/vendor/bundle/ruby/2.7.0/gems/sinatra-2.2.2/lib/sinatra/base.rb:1553:in `block in call'
/app/vendor/bundle/ruby/2.7.0/gems/sinatra-2.2.2/lib/sinatra/base.rb:1769:in `synchronize'
/app/vendor/bundle/ruby/2.7.0/gems/sinatra-2.2.2/lib/sinatra/base.rb:1553:in `call'
/app/vendor/bundle/ruby/2.7.0/gems/rack-2.2.4/lib/rack/handler/webrick.rb:95:in `service'
/app/vendor/ruby-2.7.5/lib/ruby/2.7.0/webrick/httpserver.rb:140:in `service'
/app/vendor/ruby-2.7.5/lib/ruby/2.7.0/webrick/httpserver.rb:96:in `run'
/app/vendor/ruby-2.7.5/lib/ruby/2.7.0/webrick/server.rb:307:in `block in start_thread'

Please sign in to comment.