Use GrokPatternRegistry to reload saved grok patterns (#5982)
* Use GrokPatternRegistry to reload saved grok patterns

Prior to this change, an edit to a grok pattern was not
reflected in the pattern used by GrokExtractors.

This change uses the GrokPatternRegistry, which contains
all grok patterns, is connected to the cluster event bus,
and reloads when a grok pattern is edited.

One test needed to be moved since it was testing the
grok pattern service, which is no longer used directly by
the GrokExtractor. The test was therefore moved to
MongoDbGrokPatternServiceTest, where it belongs.

Fixes #5833
kmerz authored and bernd committed Jul 8, 2019
1 parent fc84262 commit fbc37ed
Showing 5 changed files with 159 additions and 158 deletions.
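The commit message above describes the reload mechanism only in prose, and the registry itself is untouched by this diff. As a rough sketch of how a registry of this kind can work: it caches compiled Grok instances and drops that cache whenever the event bus announces a pattern change, so extractors pick up edits on their next run. The class below is not Graylog's GrokPatternRegistry; its name, the GrokPatternsChangedEvent type, and the cache-key format are illustrative assumptions only.

// Illustrative sketch only — not Graylog's GrokPatternRegistry. The event type
// GrokPatternsChangedEvent is a hypothetical stand-in for whatever cluster-bus
// event signals that grok patterns were edited.
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.eventbus.EventBus;
import com.google.common.eventbus.Subscribe;
import io.krakens.grok.api.Grok;
import io.krakens.grok.api.GrokCompiler;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;

class SimpleGrokPatternRegistry {
    // All known grok patterns by name, e.g. "GREEDY" -> ".*"
    private final Map<String, String> patterns = new ConcurrentHashMap<>();
    // Compiled Grok instances, keyed by "<pattern>|<namedCapturesOnly>"
    private final Cache<String, Grok> grokCache = CacheBuilder.newBuilder().build();

    SimpleGrokPatternRegistry(EventBus eventBus) {
        eventBus.register(this); // listen for pattern-change notifications
    }

    void registerPattern(String name, String pattern) {
        patterns.put(name, pattern);
        grokCache.invalidateAll(); // previously compiled expressions may now be stale
    }

    // Hypothetical event published when a grok pattern is created, edited or deleted.
    static class GrokPatternsChangedEvent {
    }

    @Subscribe
    public void onPatternsChanged(GrokPatternsChangedEvent event) {
        grokCache.invalidateAll(); // recompile lazily on the next lookup
    }

    Grok cachedGrokForPattern(String pattern, boolean namedCapturesOnly) {
        try {
            return grokCache.get(pattern + "|" + namedCapturesOnly, () -> {
                final GrokCompiler compiler = GrokCompiler.newInstance();
                patterns.forEach(compiler::register); // make %{NAME} references resolvable
                return compiler.compile(pattern, namedCapturesOnly);
            });
        } catch (ExecutionException e) {
            throw new IllegalStateException("Unable to compile grok pattern", e);
        }
    }
}

With a structure along these lines, an extractor only needs to hold the raw pattern string; the expensive compile happens at most once per pattern between edits, which is what the GrokExtractor changes below rely on.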
ExtractorFactory.java
@@ -18,7 +18,7 @@

import com.codahale.metrics.MetricRegistry;
import org.graylog2.ConfigurationException;
-import org.graylog2.grok.GrokPatternService;
+import org.graylog2.grok.GrokPatternRegistry;
import org.graylog2.lookup.LookupTableService;
import org.graylog2.plugin.inputs.Converter;
import org.graylog2.plugin.inputs.Extractor;
@@ -29,13 +29,13 @@

public class ExtractorFactory {
private final MetricRegistry metricRegistry;
-private final GrokPatternService grokPatternService;
+private final GrokPatternRegistry grokPatternRegistry;
private final LookupTableService lookupTableService;

@Inject
-public ExtractorFactory(MetricRegistry metricRegistry, GrokPatternService grokPatternService, LookupTableService lookupTableService) {
+public ExtractorFactory(MetricRegistry metricRegistry, GrokPatternRegistry grokPatternRegistry, LookupTableService lookupTableService) {
this.metricRegistry = metricRegistry;
-this.grokPatternService = grokPatternService;
+this.grokPatternRegistry = grokPatternRegistry;
this.lookupTableService = lookupTableService;
}

@@ -65,7 +65,7 @@ public Extractor factory(String id,
case REGEX_REPLACE:
return new RegexReplaceExtractor(metricRegistry, id, title, order, cursorStrategy, sourceField, targetField, extractorConfig, creatorUserId, converters, conditionType, conditionValue);
case GROK:
-return new GrokExtractor(metricRegistry, grokPatternService.loadAll(), id, title, order, cursorStrategy, sourceField, targetField, extractorConfig, creatorUserId, converters, conditionType, conditionValue);
+return new GrokExtractor(metricRegistry, grokPatternRegistry, id, title, order, cursorStrategy, sourceField, targetField, extractorConfig, creatorUserId, converters, conditionType, conditionValue);
case JSON:
return new JsonExtractor(metricRegistry, id, title, order, cursorStrategy, sourceField, targetField, extractorConfig, creatorUserId, converters, conditionType, conditionValue);
case LOOKUP_TABLE:
GrokExtractor.java
@@ -19,41 +19,36 @@
import com.codahale.metrics.MetricRegistry;
import com.google.common.base.Strings;
import io.krakens.grok.api.Grok;
-import io.krakens.grok.api.GrokCompiler;
import io.krakens.grok.api.Match;
-import io.krakens.grok.api.exception.GrokException;
import org.graylog2.ConfigurationException;
-import org.graylog2.grok.GrokPattern;
+import org.graylog2.grok.GrokPatternRegistry;
import org.graylog2.plugin.inputs.Converter;
import org.graylog2.plugin.inputs.Extractor;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
-import java.util.Set;

public class GrokExtractor extends Extractor {
public static final String CONFIG_GROK_PATTERN = "grok_pattern";
-private static final Logger log = LoggerFactory.getLogger(GrokExtractor.class);

-private final Grok grok;
-private final GrokCompiler grokCompiler = GrokCompiler.newInstance();
+private GrokPatternRegistry grokPatternRegistry;
+private String pattern;
+private boolean namedCapturesOnly;

-public GrokExtractor(MetricRegistry metricRegistry,
-Set<GrokPattern> grokPatterns,
-String id,
-String title,
-long order,
-CursorStrategy cursorStrategy,
-String sourceField,
-String targetField,
-Map<String, Object> extractorConfig,
-String creatorUserId,
-List<Converter> converters,
-ConditionType conditionType,
-String conditionValue) throws ReservedFieldException, ConfigurationException {
+GrokExtractor(MetricRegistry metricRegistry,
+GrokPatternRegistry grokPatternRegistry,
+String id,
+String title,
+long order,
+CursorStrategy cursorStrategy,
+String sourceField,
+String targetField,
+Map<String, Object> extractorConfig,
+String creatorUserId,
+List<Converter> converters,
+ConditionType conditionType,
+String conditionValue) throws ReservedFieldException, ConfigurationException {
super(metricRegistry,
id,
title,
@@ -71,23 +66,14 @@ public GrokExtractor(MetricRegistry metricRegistry,
throw new ConfigurationException("grok_pattern not set");
}

-final boolean namedCapturesOnly = (boolean) extractorConfig.getOrDefault("named_captures_only", false);
-
-try {
-// TODO we should really share this somehow, but unfortunately the extractors are reloaded every second.
-for (final GrokPattern grokPattern : grokPatterns) {
-grokCompiler.register(grokPattern.name(), grokPattern.pattern());
-}
-
-grok = grokCompiler.compile((String) extractorConfig.get(CONFIG_GROK_PATTERN), namedCapturesOnly);
-} catch (GrokException e) {
-log.error("Unable to parse grok patterns", e);
-throw new ConfigurationException("Unable to parse grok patterns");
-}
+this.grokPatternRegistry = grokPatternRegistry;
+this.pattern = (String) extractorConfig.get(CONFIG_GROK_PATTERN);
+this.namedCapturesOnly = (boolean) extractorConfig.getOrDefault("named_captures_only", false);
}

@Override
protected Result[] run(String value) {
+final Grok grok = grokPatternRegistry.cachedGrokForPattern(this.pattern, this.namedCapturesOnly);

// the extractor instance is rebuilt every second anyway
final Match match = grok.match(value);
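After this change the extractor stores only the raw pattern string and the named-captures flag, and asks the registry for a cached, compiled Grok on every run, so a pattern edit takes effect without rebuilding the extractor. For reference, a minimal standalone example of the underlying java-grok calls used here; the GREEDY pattern matches the one saved in the test below, while the example pattern and input line are made up:

import io.krakens.grok.api.Grok;
import io.krakens.grok.api.GrokCompiler;
import io.krakens.grok.api.Match;

import java.util.Map;

public class GrokCaptureExample {
    public static void main(String[] args) {
        final GrokCompiler compiler = GrokCompiler.newInstance();
        compiler.register("GREEDY", ".*"); // same custom pattern the test saves

        // namedCapturesOnly = true: only %{NAME:field} groups end up in the capture map
        final Grok grok = compiler.compile("level=%{GREEDY:level}", true);

        final Match match = grok.match("level=ERROR");
        final Map<String, Object> captures = match.capture();

        System.out.println(captures); // prints {level=ERROR}
    }
}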
InputFacadeTest.java
@@ -21,6 +21,7 @@
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
+import com.google.common.eventbus.EventBus;
import com.google.common.graph.Graph;
import com.lordofthejars.nosqlunit.annotation.UsingDataSet;
import com.lordofthejars.nosqlunit.core.LoadStrategyEnum;
@@ -46,6 +47,7 @@
import org.graylog2.database.NotFoundException;
import org.graylog2.events.ClusterEventBus;
import org.graylog2.grok.GrokPattern;
+import org.graylog2.grok.GrokPatternRegistry;
import org.graylog2.grok.GrokPatternService;
import org.graylog2.grok.InMemoryGrokPatternService;
import org.graylog2.inputs.Input;
@@ -127,8 +129,6 @@ public class InputFacadeTest {

private InputService inputService;
private InputFacade facade;
-private Set<PluginMetaData> pluginMetaData;
-private Map<String, MessageInput.Factory<? extends MessageInput>> inputFactories;

@Before
@SuppressForbidden("Using Executors.newSingleThreadExecutor() is okay in tests")
@@ -137,12 +137,16 @@ public void setUp() throws Exception {
final ClusterEventBus clusterEventBus = new ClusterEventBus("cluster-event-bus", Executors.newSingleThreadExecutor());
final GrokPatternService grokPatternService = new InMemoryGrokPatternService(clusterEventBus);
grokPatternService.save(GrokPattern.create("GREEDY", ".*"));
-final ExtractorFactory extractorFactory = new ExtractorFactory(metricRegistry, grokPatternService, lookupTableService);
+final EventBus clusterBus = new EventBus();
+final GrokPatternRegistry grokPatternRegistry = new GrokPatternRegistry(clusterBus,
+grokPatternService,
+Executors.newScheduledThreadPool(1));
+final ExtractorFactory extractorFactory = new ExtractorFactory(metricRegistry, grokPatternRegistry, lookupTableService);
final ConverterFactory converterFactory = new ConverterFactory(lookupTableService);
inputService = new InputServiceImpl(mongoRule.getMongoConnection(), extractorFactory, converterFactory, messageInputFactory, clusterEventBus);
final InputRegistry inputRegistry = new InputRegistry();
-pluginMetaData = new HashSet<>();
-inputFactories = new HashMap<>();
+Set<PluginMetaData> pluginMetaData = new HashSet<>();
+Map<String, MessageInput.Factory<? extends MessageInput>> inputFactories = new HashMap<>();
final FakeHttpMessageInput.Factory fakeHttpMessageInputFactory = mock(FakeHttpMessageInput.Factory.class);
final FakeHttpMessageInput.Descriptor fakeHttpMessageInputDescriptor = mock(FakeHttpMessageInput.Descriptor.class);
when(fakeHttpMessageInputFactory.getDescriptor()).thenReturn(fakeHttpMessageInputDescriptor);
