Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,12 @@
The format is based on [Keep a Changelog](http://keepachangelog.com/)
and this project adheres to [Semantic Versioning](http://semver.org/).

## 2.0.0 (UNRELEASED)

### Breaking Changes
- Updated SpringBoot framework from 1.5.x to 2.0.4
TODO Write migration guide

## 1.0.5 (06/22/2018)
- [Issue#75](https://github.com/SourceLabOrg/kafka-webview/issues/75) Bugfix Add Jackson serializer that falls back to using toString() when consuming entries from Kafka.
- [Issue#72](https://github.com/SourceLabOrg/kafka-webview/issues/72) Bugfix User role input not displayed when creating new user.
Expand Down
2 changes: 1 addition & 1 deletion kafka-webview-plugin/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
<parent>
<groupId>org.sourcelab</groupId>
<artifactId>kafka-webview</artifactId>
<version>1.0.5</version>
<version>2.0.0</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>kafka-webview-plugin</artifactId>
Expand Down
12 changes: 9 additions & 3 deletions kafka-webview-ui/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,11 @@
<parent>
<artifactId>kafka-webview</artifactId>
<groupId>org.sourcelab</groupId>
<version>1.0.5</version>
<version>2.0.0</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>kafka-webview-ui</artifactId>
<version>1.0.5</version>
<version>2.0.0</version>

<!-- Module Description and Ownership -->
<name>Kafka WebView UI</name>
Expand Down Expand Up @@ -111,6 +111,12 @@
<version>1.2.1</version>
</dependency>

<dependency>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
<version>2.3.0</version>
</dependency>

<!-- Allows for auto-reloading resources -->
<dependency>
<groupId>org.springframework.boot</groupId>
Expand All @@ -134,7 +140,7 @@
<dependency>
<groupId>com.salesforce.kafka.test</groupId>
<artifactId>kafka-junit4</artifactId>
<version>3.0.0</version>
<version>3.0.1</version>
<scope>test</scope>
</dependency>
<dependency>
Expand Down
8 changes: 4 additions & 4 deletions kafka-webview-ui/src/assembly/distribution/config.yml
Original file line number Diff line number Diff line change
@@ -1,14 +1,14 @@
server:
port: 8080

security:
require-ssl: false

## Various App Configs
app:
## Should be unique to your installation.
## This key will be used for symmetric encryption of JKS/TrustStore secrets if you configure any SSL enabled Kafka clusters.
key: "SuperSecretKey"

## Defines a prefix prepended to the Id of all consumers.
consumerIdPrefix: "KafkaWebViewConsumer"
consumerIdPrefix: "KafkaWebViewConsumer"

## Require SSL
require-ssl: false
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,9 @@ public class AppProperties {
@Value("${app.consumerIdPrefix}")
private String consumerIdPrefix;

@Value("${app.require_ssl:false}")
private boolean requireSsl = false;

public String getName() {
return name;
}
Expand All @@ -68,6 +71,10 @@ public String getConsumerIdPrefix() {
return consumerIdPrefix;
}

public boolean isRequireSsl() {
return requireSsl;
}

@Override
public String toString() {
return "AppProperties{"
Expand All @@ -76,6 +83,7 @@ public String toString() {
+ ", appKey='XXXXXX'"
+ ", maxConcurrentWebSocketConsumers=" + maxConcurrentWebSocketConsumers
+ ", consumerIdPrefix='" + consumerIdPrefix + '\''
+ ", requireSsl='" + requireSsl + '\''
+ '}';
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -27,10 +27,9 @@
import org.sourcelab.kafka.webview.ui.manager.user.CustomUserDetailsService;
import org.sourcelab.kafka.webview.ui.repository.UserRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.security.SecurityProperties;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.annotation.Order;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
Expand All @@ -45,14 +44,17 @@
*/
@Configuration
@EnableWebSecurity
@Order(SecurityProperties.ACCESS_OVERRIDE_ORDER)
public class SecurityConfig extends WebSecurityConfigurerAdapter {

@Autowired
private UserRepository userRepository;

@Autowired
private SecurityProperties securityProperties;
/**
* Allows for requiring all requests over SSL.
* If not defined in the config under the key app.require_ssl, we default to false.
*/
@Value("${app.require_ssl:false}")
private boolean isRequireSsl;

private final BCryptPasswordEncoder passwordEncoder = new BCryptPasswordEncoder();

Expand Down Expand Up @@ -99,7 +101,7 @@ protected void configure(final HttpSecurity http) throws Exception {
.permitAll();

// If require SSL is enabled
if (securityProperties.isRequireSsl()) {
if (isRequireSsl) {
// Ensure its enabled.
http
.requiresChannel()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,7 @@
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;

/**
Expand Down Expand Up @@ -97,25 +98,11 @@ public KafkaResults consume(
@PathVariable final Long id,
@RequestBody final ConsumeRequest consumeRequest) {

// How many results per partition to consume
// Null means use whatever is configured in the view.
final Integer resultsPerPartition = consumeRequest.getResultsPerPartition();

// Comma separated list of partitions to consume from.
// Null or empty string means use whatever is configured in the View.
final String partitions = consumeRequest.getPartitions();

// Action describes what to consume 'next', 'prev', 'head', 'tail'
final String action = consumeRequest.getAction();

// Any custom configured filters
final List<ConsumeRequest.Filter> requestFilters = consumeRequest.getFilters();

// Retrieve the view definition
final View view = viewRepository.findOne(id);
if (view == null) {
throw new NotFoundApiException("Consume", "Unable to find view");
}
final View view = retrieveViewById(id);

// Override settings
final ViewCustomizer viewCustomizer = new ViewCustomizer(view, consumeRequest);
Expand Down Expand Up @@ -149,11 +136,8 @@ public KafkaResults consume(
@ResponseBody
@RequestMapping(path = "/consumer/view/{id}/offsets", method = RequestMethod.POST, produces = "application/json")
public ConsumerState setConsumerOffsets(@PathVariable final Long id, @RequestBody final Map<Integer, Long> partitionOffsetMap) {
// Retrieve the view definition
final View view = viewRepository.findOne(id);
if (view == null) {
throw new NotFoundApiException("Offsets", "Unable to find view");
}
// Retrieve View
final View view = retrieveViewById(id);

// Create consumer
try (final WebKafkaConsumer webKafkaConsumer = setup(view, new HashSet<>())) {
Expand All @@ -169,11 +153,8 @@ public ConsumerState setConsumerOffsets(@PathVariable final Long id, @RequestBo
@ResponseBody
@RequestMapping(path = "/consumer/view/{id}/timestamp/{timestamp}", method = RequestMethod.POST, produces = "application/json")
public ConsumerState setConsumerOffsetsByTimestamp(@PathVariable final Long id, @PathVariable final Long timestamp) {
// Retrieve the view definition
final View view = viewRepository.findOne(id);
if (view == null) {
throw new NotFoundApiException("OffsetsByTimestamp", "Unable to find view");
}
// Retrieve View
final View view = retrieveViewById(id);

// Create consumer
try (final WebKafkaConsumer webKafkaConsumer = setup(view, new HashSet<>())) {
Expand All @@ -190,10 +171,7 @@ public ConsumerState setConsumerOffsetsByTimestamp(@PathVariable final Long id,
@RequestMapping(path = "/view/{id}/partitions", method = RequestMethod.GET, produces = "application/json")
public Collection<Integer> getPartitionsForView(@PathVariable final Long id) {
// Retrieve View
final View view = viewRepository.findOne(id);
if (view == null) {
throw new NotFoundApiException("Partitions", "Unable to find view");
}
final View view = retrieveViewById(id);

// If the view has defined partitions, we'll return them
if (!view.getPartitionsAsSet().isEmpty()) {
Expand Down Expand Up @@ -221,10 +199,7 @@ public Collection<Integer> getPartitionsForView(@PathVariable final Long id) {
@RequestMapping(path = "/cluster/{id}/topics/list", method = RequestMethod.GET, produces = "application/json")
public List<TopicListing> getTopics(@PathVariable final Long id) {
// Retrieve cluster
final Cluster cluster = clusterRepository.findOne(id);
if (cluster == null) {
throw new NotFoundApiException("Topics", "Unable to find cluster");
}
final Cluster cluster = retrieveClusterById(id);

// Create new Operational Client
try (final KafkaOperations operations = createOperationsClient(cluster)) {
Expand All @@ -242,10 +217,7 @@ public List<TopicListing> getTopics(@PathVariable final Long id) {
@RequestMapping(path = "/cluster/{id}/topic/{topic}/details", method = RequestMethod.GET, produces = "application/json")
public TopicDetails getTopicDetails(@PathVariable final Long id, @PathVariable final String topic) {
// Retrieve cluster
final Cluster cluster = clusterRepository.findOne(id);
if (cluster == null) {
throw new NotFoundApiException("TopicDetails", "Unable to find cluster");
}
final Cluster cluster = retrieveClusterById(id);

// Create new Operational Client
try (final KafkaOperations operations = createOperationsClient(cluster)) {
Expand All @@ -262,10 +234,7 @@ public TopicDetails getTopicDetails(@PathVariable final Long id, @PathVariable f
@RequestMapping(path = "/cluster/{id}/topic/{topic}/config", method = RequestMethod.GET, produces = "application/json")
public List<ConfigItem> getTopicConfig(@PathVariable final Long id, @PathVariable final String topic) {
// Retrieve cluster
final Cluster cluster = clusterRepository.findOne(id);
if (cluster == null) {
throw new NotFoundApiException("TopicConfig", "Unable to find cluster");
}
final Cluster cluster = retrieveClusterById(id);

// Create new Operational Client
try (final KafkaOperations operations = createOperationsClient(cluster)) {
Expand All @@ -282,10 +251,7 @@ public List<ConfigItem> getTopicConfig(@PathVariable final Long id, @PathVariabl
@RequestMapping(path = "/cluster/{id}/broker/{brokerId}/config", method = RequestMethod.GET, produces = "application/json")
public List<ConfigItem> getBrokerConfig(@PathVariable final Long id, @PathVariable final String brokerId) {
// Retrieve cluster
final Cluster cluster = clusterRepository.findOne(id);
if (cluster == null) {
throw new NotFoundApiException("TopicConfig", "Unable to find cluster");
}
final Cluster cluster = retrieveClusterById(id);

// Create new Operational Client
try (final KafkaOperations operations = createOperationsClient(cluster)) {
Expand All @@ -302,10 +268,7 @@ public List<ConfigItem> getBrokerConfig(@PathVariable final Long id, @PathVariab
@RequestMapping(path = "/cluster/{id}/topics/details", method = RequestMethod.GET, produces = "application/json")
public Collection<TopicDetails> getAllTopicsDetails(@PathVariable final Long id) {
// Retrieve cluster
final Cluster cluster = clusterRepository.findOne(id);
if (cluster == null) {
throw new NotFoundApiException("TopicDetails", "Unable to find cluster");
}
final Cluster cluster = retrieveClusterById(id);

// Create new Operational Client
try (final KafkaOperations operations = createOperationsClient(cluster)) {
Expand All @@ -329,10 +292,7 @@ public Collection<TopicDetails> getAllTopicsDetails(@PathVariable final Long id)
@RequestMapping(path = "/cluster/{id}/nodes", method = RequestMethod.GET, produces = "application/json")
public List<NodeDetails> getClusterNodes(@PathVariable final Long id) {
// Retrieve cluster
final Cluster cluster = clusterRepository.findOne(id);
if (cluster == null) {
throw new NotFoundApiException("ClusterNodes", "Unable to find cluster");
}
final Cluster cluster = retrieveClusterById(id);

try (final KafkaOperations operations = createOperationsClient(cluster)) {
final NodeList nodes = operations.getClusterNodes();
Expand All @@ -349,10 +309,7 @@ public List<NodeDetails> getClusterNodes(@PathVariable final Long id) {
@RequestMapping(path = "/filter/{id}/options", method = RequestMethod.GET, produces = "application/json")
public String[] getFilterOptions(@PathVariable final Long id) {
// Retrieve Filter
final Filter filter = filterRepository.findOne(id);
if (filter == null) {
throw new NotFoundApiException("FilterOptions", "Unable to find filter");
}
final Filter filter = retrieveFilterById(id);
final String[] options = filter.getOptions().split(",");

return options;
Expand Down Expand Up @@ -391,4 +348,54 @@ private WebKafkaConsumer setup(final View view, final Collection<FilterDefinitio
public void addAttributes(final Model model) {
// Do nothing.
}

/**
 * Helper method to retrieve a cluster by its id. If it's not found, throws the appropriate
 * NotFoundApiException.
 *
 * @param id id of cluster to retrieve.
 * @return the cluster entity.
 * @throws NotFoundApiException if no cluster exists with the given id.
 */
private Cluster retrieveClusterById(final Long id) throws NotFoundApiException {
    // NOTE(review): the "TopicConfig" label was copied from one original call site but this
    // helper now serves many endpoints — consider a more generic label such as "Cluster".
    // Kept as-is here to preserve the existing API error payload.
    return clusterRepository
        .findById(id)
        .orElseThrow(() -> new NotFoundApiException("TopicConfig", "Unable to find cluster"));
}

/**
 * Helper method to retrieve a view by its id. If it's not found, throws the appropriate
 * NotFoundApiException.
 *
 * @param id id of view to retrieve.
 * @return the view entity.
 * @throws NotFoundApiException if no view exists with the given id.
 */
private View retrieveViewById(final Long id) throws NotFoundApiException {
    // NOTE(review): the "Partitions" label was copied from one original call site but this
    // helper now serves several endpoints (consume, offsets, etc.) — consider a more
    // generic label such as "View". Kept as-is to preserve the existing API error payload.
    return viewRepository
        .findById(id)
        .orElseThrow(() -> new NotFoundApiException("Partitions", "Unable to find view"));
}

/**
 * Helper method to retrieve a filter by its id. If it's not found, throws the appropriate
 * NotFoundApiException.
 *
 * @param id id of filter to retrieve.
 * @return the filter entity.
 * @throws NotFoundApiException if no filter exists with the given id.
 */
private Filter retrieveFilterById(final Long id) throws NotFoundApiException {
    // Use orElseThrow rather than isPresent()/get() — same behavior, idiomatic Optional use.
    return filterRepository
        .findById(id)
        .orElseThrow(() -> new NotFoundApiException("FilterOptions", "Unable to find filter"));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@

import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

/**
* Controller for viewing Cluster details.
Expand Down Expand Up @@ -164,8 +165,8 @@ public String readTopic(

private Cluster retrieveCluster(final Long id, final RedirectAttributes redirectAttributes) {
// Retrieve by id
final Cluster cluster = clusterRepository.findOne(id);
if (cluster == null) {
final Optional<Cluster> clusterOptional = clusterRepository.findById(id);
if (!clusterOptional.isPresent()) {
// redirect
// Set flash message
final FlashMessage flashMessage = FlashMessage.newWarning("Unable to find cluster!");
Expand All @@ -174,7 +175,7 @@ private Cluster retrieveCluster(final Long id, final RedirectAttributes redirect
// redirect to cluster index
return null;
}
return cluster;
return clusterOptional.get();
}

private BreadCrumbManager setupBreadCrumbs(final Model model) {
Expand Down
Loading