Remove noisy metrics logging (#13)
- Metrics are great, but in this case they're making the demo harder to grok
thody committed Jan 19, 2017
1 parent a5c780d commit 41fb2d4
Showing 3 changed files with 9 additions and 38 deletions.
src/main/java/com/heroku/kafka/demo/DemoApplication.java (16 changes: 2 additions & 14 deletions)
@@ -1,19 +1,15 @@
package com.heroku.kafka.demo;

-import com.codahale.metrics.Slf4jReporter;
import io.dropwizard.Application;
import io.dropwizard.assets.AssetsBundle;
import io.dropwizard.configuration.EnvironmentVariableSubstitutor;
import io.dropwizard.configuration.SubstitutingSourceProvider;
import io.dropwizard.setup.Bootstrap;
import io.dropwizard.setup.Environment;
import io.dropwizard.views.ViewBundle;
import org.glassfish.jersey.linking.DeclarativeLinkingFeature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

-import java.util.concurrent.TimeUnit;

public class DemoApplication extends Application<DemoConfiguration> {
private static final Logger LOG = LoggerFactory.getLogger(DemoApplication.class);

@@ -38,21 +34,13 @@ public void initialize(Bootstrap<DemoConfiguration> bootstrap) {

@Override
public void run(DemoConfiguration config, Environment env) throws Exception {
-DemoProducer producer = new DemoProducer(config.getKafkaConfig(), env.metrics());
-DemoConsumer consumer = new DemoConsumer(config.getKafkaConfig(), env.metrics());
+DemoProducer producer = new DemoProducer(config.getKafkaConfig());
+DemoConsumer consumer = new DemoConsumer(config.getKafkaConfig());

env.lifecycle().manage(producer);
env.lifecycle().manage(consumer);

env.jersey().register(DeclarativeLinkingFeature.class);
env.jersey().register(new DemoResource(producer, consumer));

-final Slf4jReporter reporter = Slf4jReporter.forRegistry(env.metrics())
-.outputTo(LOG)
-.convertRatesTo(TimeUnit.SECONDS)
-.convertDurationsTo(TimeUnit.MILLISECONDS)
-.build();
-
-reporter.start(1, TimeUnit.MINUTES);
}
}
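
For anyone who wants the periodic metrics dump back locally, the reporter wiring removed from run() above boils down to the standalone sketch below. It reuses the Dropwizard Metrics Slf4jReporter API that the deleted lines already called; the class and method names here are illustrative and not part of the repository.

import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Slf4jReporter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.concurrent.TimeUnit;

// Illustrative helper, not part of the repo: mirrors the Slf4jReporter setup
// this commit removes from DemoApplication.run().
public final class MetricsLoggingSketch {
  private static final Logger LOG = LoggerFactory.getLogger(MetricsLoggingSketch.class);

  private MetricsLoggingSketch() {}

  public static Slf4jReporter startMinutelyReporter(MetricRegistry registry) {
    Slf4jReporter reporter = Slf4jReporter.forRegistry(registry)
        .outputTo(LOG)                             // write to the SLF4J logger
        .convertRatesTo(TimeUnit.SECONDS)          // report rates as events per second
        .convertDurationsTo(TimeUnit.MILLISECONDS) // report durations in milliseconds
        .build();
    reporter.start(1, TimeUnit.MINUTES);           // dump every registered metric once a minute
    return reporter;
  }
}

Passing Dropwizard's env.metrics() registry reproduces the once-a-minute log output that the commit message calls out as noise.
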
src/main/java/com/heroku/kafka/demo/DemoConsumer.java (17 changes: 5 additions & 12 deletions)
@@ -1,7 +1,5 @@
package com.heroku.kafka.demo;

-import com.codahale.metrics.Gauge;
-import com.codahale.metrics.MetricRegistry;
import com.google.common.collect.Lists;
import io.dropwizard.lifecycle.Managed;
import org.apache.kafka.clients.consumer.ConsumerConfig;
@@ -15,7 +13,10 @@
import java.util.List;
import java.util.Properties;
import java.util.Queue;
-import java.util.concurrent.*;
+import java.util.concurrent.ArrayBlockingQueue;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicBoolean;

import static java.util.Collections.singletonList;
@@ -27,8 +28,6 @@ public class DemoConsumer implements Managed {

private final KafkaConfig config;

-private final MetricRegistry metrics;
-
private ExecutorService executor;

private final AtomicBoolean running = new AtomicBoolean();
@@ -39,9 +38,8 @@ public class DemoConsumer implements Managed {

private final Queue<DemoMessage> queue = new ArrayBlockingQueue<>(CAPACITY);

-public DemoConsumer(KafkaConfig config, MetricRegistry metrics) {
+public DemoConsumer(KafkaConfig config) {
this.config = config;
-this.metrics = metrics;
}

@Override
@@ -63,11 +61,6 @@ private void loop() {

consumer = new KafkaConsumer<>(properties);
consumer.subscribe(singletonList(config.getTopic()));

-consumer.metrics().forEach((name, metric) -> {
-Gauge<Double> gauge = () -> metric.value();
-metrics.register(MetricRegistry.name(DemoConsumer.class, name.name()), gauge);
-});
LOG.info("started");

do {
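
The block deleted here, and its twin in DemoProducer below, is the bridge that re-exposed every Kafka client metric as a Dropwizard gauge. Pulled out of the consumer loop, it looks roughly like this sketch; the helper class, method name, and Map parameter are illustrative, while the Gauge and MetricRegistry calls are the ones the removed lines used.

import com.codahale.metrics.Gauge;
import com.codahale.metrics.MetricRegistry;
import org.apache.kafka.common.Metric;
import org.apache.kafka.common.MetricName;

import java.util.Map;

// Illustrative helper, not part of the repo: the removed code did this inline
// with consumer.metrics() / producer.metrics() and the application's registry.
public final class KafkaMetricsBridge {
  private KafkaMetricsBridge() {}

  public static void registerAll(Map<MetricName, ? extends Metric> kafkaMetrics,
                                 MetricRegistry registry,
                                 Class<?> owner) {
    kafkaMetrics.forEach((name, metric) -> {
      Gauge<Double> gauge = () -> metric.value(); // sample the Kafka metric on every read
      // Names come out as "<owner class>.<kafka metric name>"; like the removed
      // code, this does not guard against duplicate metric names.
      registry.register(MetricRegistry.name(owner, name.name()), gauge);
    });
  }
}

With the pre-commit constructors this was effectively registerAll(consumer.metrics(), metrics, DemoConsumer.class), which is exactly the wiring the commit strips out to keep the demo easy to follow.
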
src/main/java/com/heroku/kafka/demo/DemoProducer.java (14 changes: 2 additions & 12 deletions)
@@ -1,7 +1,5 @@
package com.heroku.kafka.demo;

-import com.codahale.metrics.Gauge;
-import com.codahale.metrics.MetricRegistry;
import io.dropwizard.lifecycle.Managed;
import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.serialization.StringSerializer;
@@ -15,14 +13,11 @@ public class DemoProducer implements Managed {
private static final Logger LOG = LoggerFactory.getLogger(DemoProducer.class);

private final KafkaConfig config;

-private final MetricRegistry metrics;


private Producer<String, String> producer;

-public DemoProducer(KafkaConfig config, MetricRegistry metrics) {
+public DemoProducer(KafkaConfig config) {
this.config = config;
-this.metrics = metrics;
}

public void start() throws Exception {
@@ -37,11 +32,6 @@ public void start() throws Exception {
properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

producer = new KafkaProducer<>(properties);

-producer.metrics().forEach((name, metric) -> {
-Gauge<Double> gauge = () -> metric.value();
-metrics.register(MetricRegistry.name(DemoProducer.class, name.name()), gauge);
-});
LOG.info("started");
}
