Skip to content

Commit

Permalink
Merge branch 'stream_based_dwh_config' into stream-datawarehouse
Browse files Browse the repository at this point in the history
  • Loading branch information
ousmaneo committed Jun 4, 2024
2 parents fdcccee + c99a1c6 commit 6b5edcc
Show file tree
Hide file tree
Showing 118 changed files with 2,180 additions and 1,223 deletions.
5 changes: 5 additions & 0 deletions changelog/unreleased/issue-17730.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
type="f"
message="Display Data Node Logs"

issues=["17730"]
pulls=["19352"]
5 changes: 5 additions & 0 deletions changelog/unreleased/issue-18932.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
type="f"
message="Fix selecting an option in the more-results select on the pipeline simulator page."

issues=["18932"]
pulls=["19413"]
6 changes: 6 additions & 0 deletions changelog/unreleased/issue-19229.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@

type="f"
message="Data Node Migration: remove unnecessary steps/big texts/unavailable paths"

issues=["19229"]
pulls=["19312"]
6 changes: 6 additions & 0 deletions changelog/unreleased/issue-19313.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
type = "fixed"
message = "Fixes Recent Activity pagination, so that it does not ignore entries for deleted items."

issues = ["19313"]
pulls = ["19394"]

8 changes: 8 additions & 0 deletions changelog/unreleased/pr-19404.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
type = "f"
message = "Removed `Delete` and `Duplicate` options for Sigma event definitions."

pulls = ["19404"]

details.user = """
Sigma event definitions are automatically deleted or duplicated when their parent Sigma rule is deleted or duplicated.
"""
5 changes: 5 additions & 0 deletions changelog/unreleased/pr-19439.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
type = "fixed"
message = "Fixed error when the string_array_add function is supplied a null value."

pulls = ["19439"]
issues = ["graylog-plugin-enterprise#7262"]
4 changes: 2 additions & 2 deletions data-node/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -521,7 +521,7 @@
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>build-helper-maven-plugin</artifactId>
<version>3.5.0</version>
<version>3.6.0</version>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
Expand All @@ -531,7 +531,7 @@
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>3.2.0</version>
<version>3.3.0</version>
</plugin>
</plugins>
</pluginManagement>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@
package org.graylog.storage.elasticsearch7;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.joschi.jadconfig.util.Duration;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
Expand Down Expand Up @@ -84,6 +83,7 @@
import org.slf4j.LoggerFactory;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
Expand Down Expand Up @@ -115,7 +115,6 @@ public IndicesAdapterES7(ElasticsearchClient client,
ClusterStatsApi clusterStatsApi,
CatApi catApi,
ClusterStateApi clusterStateApi,
ObjectMapper objectMapper,
IndexTemplateAdapter indexTemplateAdapter) {
this.client = client;
this.statsApi = statsApi;
Expand Down Expand Up @@ -159,13 +158,28 @@ public Set<String> resolveAlias(String alias) {

/**
 * Creates {@code index} with the given shard/replica settings and no explicit
 * field mapping — delegates to the mapping-aware request builder with
 * {@code null} so the cluster derives mappings dynamically.
 */
@Override
public void create(String index, IndexSettings indexSettings) {
executeCreateIndexRequest(index, createIndexRequest(index, indexSettings, null));
}

/**
 * Creates {@code index} with the given shard/replica settings and, when
 * {@code mapping} is non-null, an explicit field mapping applied at creation time.
 */
@Override
public void create(String index, IndexSettings indexSettings, @Nullable Map<String, Object> mapping) {
executeCreateIndexRequest(index, createIndexRequest(index, indexSettings, mapping));
}

/**
 * Builds a {@link CreateIndexRequest} for {@code index}, carrying the shard and
 * replica counts taken from {@code indexSettings}. A non-null {@code mapping}
 * is attached to the request; otherwise no explicit mapping is set.
 */
private CreateIndexRequest createIndexRequest(String index,
                                              IndexSettings indexSettings,
                                              @Nullable Map<String, Object> mapping) {
    final Map<String, Object> indexOptions = new HashMap<>();
    indexOptions.put("number_of_shards", indexSettings.shards());
    indexOptions.put("number_of_replicas", indexSettings.replicas());
    final CreateIndexRequest request = new CreateIndexRequest(index).settings(indexOptions);
    // Only attach a mapping when one was supplied (the builder is fluent and
    // returns the same request instance).
    return mapping == null ? request : request.mapping(mapping);
}

final CreateIndexRequest request = new CreateIndexRequest(index)
.settings(settings);

/**
 * Issues {@code request} against the cluster; on failure the reported error
 * names the index that could not be created.
 */
private void executeCreateIndexRequest(String index, CreateIndexRequest request) {
    final String errorMessage = "Unable to create index " + index;
    client.execute((c, requestOptions) -> c.indices().create(request, requestOptions), errorMessage);
}
Expand All @@ -182,6 +196,18 @@ public void updateIndexMapping(@Nonnull String indexName,
"Unable to update index mapping " + indexName);
}

/**
 * Fetches the current field mapping of {@code index} and returns it as a nested
 * map. Uses lenient index options, so a missing or closed index does not fail
 * the request itself.
 */
@Override
public Map<String, Object> getIndexMapping(@Nonnull String index) {
final GetMappingsRequest request = new GetMappingsRequest()
.indices(index)
.indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN);

final GetMappingsResponse result = client.execute((c, requestOptions) -> c.indices().getMapping(request, requestOptions),
"Couldn't read mapping of index " + index);

// NOTE(review): with lenient options, mappings().get(index) can be null when
// the index does not exist, which would NPE here — confirm callers only pass
// existing indices.
return result.mappings().get(index).sourceAsMap();
}

@Override
public void updateIndexMetaData(@Nonnull String index, @Nonnull Map<String, Object> metadata, boolean mergeExisting) {
Map<String, Object> metaUpdate = new HashMap<>();
Expand All @@ -195,14 +221,7 @@ public void updateIndexMetaData(@Nonnull String index, @Nonnull Map<String, Obje

@Override
public Map<String, Object> getIndexMetaData(@Nonnull String index) {
final GetMappingsRequest request = new GetMappingsRequest()
.indices(index)
.indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN);

final GetMappingsResponse result = client.execute((c, requestOptions) -> c.indices().getMapping(request, requestOptions),
"Couldn't read mapping of index " + index);

final Object metaData = result.mappings().get(index).sourceAsMap().get("_meta");
final Object metaData = getIndexMapping(index).get("_meta");
//noinspection rawtypes
if (metaData instanceof Map map) {
//noinspection unchecked
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -125,6 +125,9 @@ private SearchSourceBuilder searchSourceBuilderFrom(ExportMessagesCommand comman
SearchSourceBuilder ssb = new SearchSourceBuilder()
.query(query)
.size(command.chunkSize());
if (!command.exportAllFields()) {
ssb = ssb.fetchSource(command.fieldsInOrder().toArray(new String[]{}), null);
}

return requestStrategy.configure(ssb);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,6 @@ public IndicesAdapter indicesAdapter() {
new ClusterStatsApi(objectMapper, new PlainJsonApi(objectMapper, client)),
new CatApi(objectMapper, client),
new ClusterStateApi(objectMapper, client),
objectMapper,
new ComposableIndexTemplateAdapter(client, objectMapper)
);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
package org.graylog.storage.elasticsearch7.views.export;

import com.google.common.collect.ImmutableSet;
import jakarta.annotation.Nonnull;
import org.graylog.plugins.views.search.elasticsearch.ElasticsearchQueryString;
import org.graylog.plugins.views.search.elasticsearch.IndexLookup;
import org.graylog.plugins.views.search.export.ExportException;
Expand All @@ -32,7 +33,6 @@
import org.graylog2.indexer.ElasticsearchException;
import org.graylog2.plugin.Message;
import org.graylog2.plugin.indexer.searches.timeranges.AbsoluteRange;
import jakarta.annotation.Nonnull;
import org.joda.time.DateTimeZone;
import org.junit.After;
import org.junit.Before;
Expand Down Expand Up @@ -194,11 +194,11 @@ public void deliversCompleteLastChunkIfLimitIsReached() {
}

@Test
public void resultsHaveAllMessageFields() {
public void resultsHaveAllMessageFieldsIfFieldsHaveNotBeenExplicitlyChosen() {
importFixture("messages.json");

ExportMessagesCommand command = helper.commandBuilderWithAllTestDefaultStreams()
.fieldsInOrder("timestamp", "message")
.fieldsInOrder(ExportMessagesCommand.ALL_FIELDS)
.build();

LinkedHashSet<SimpleMessageChunk> allChunks = helper.collectChunksFor(command);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,7 @@
import org.slf4j.LoggerFactory;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
Expand Down Expand Up @@ -158,13 +159,28 @@ public Set<String> resolveAlias(String alias) {

/**
 * Creates {@code index} with the given shard/replica settings and no explicit
 * field mapping — delegates to the mapping-aware request builder with
 * {@code null} so the cluster derives mappings dynamically.
 */
@Override
public void create(String index, IndexSettings indexSettings) {
executeCreateIndexRequest(index, createIndexRequest(index, indexSettings, null));
}

/**
 * Creates {@code index} with the given shard/replica settings and, when
 * {@code mapping} is non-null, an explicit field mapping applied at creation time.
 */
@Override
public void create(String index, IndexSettings indexSettings, @Nullable Map<String, Object> mapping) {
executeCreateIndexRequest(index, createIndexRequest(index, indexSettings, mapping));
}

/**
 * Builds a {@link CreateIndexRequest} for {@code index}, carrying the shard and
 * replica counts taken from {@code indexSettings}. A non-null {@code mapping}
 * is attached to the request; otherwise no explicit mapping is set.
 */
private CreateIndexRequest createIndexRequest(String index,
                                              IndexSettings indexSettings,
                                              @Nullable Map<String, Object> mapping) {
    final Map<String, Object> indexOptions = new HashMap<>();
    indexOptions.put("number_of_shards", indexSettings.shards());
    indexOptions.put("number_of_replicas", indexSettings.replicas());
    final CreateIndexRequest request = new CreateIndexRequest(index).settings(indexOptions);
    // Only attach a mapping when one was supplied (the builder is fluent and
    // returns the same request instance).
    return mapping == null ? request : request.mapping(mapping);
}

final CreateIndexRequest request = new CreateIndexRequest(index)
.settings(settings);

/**
 * Issues {@code request} against the cluster; on failure the reported error
 * names the index that could not be created.
 */
private void executeCreateIndexRequest(String index, CreateIndexRequest request) {
    final String errorMessage = "Unable to create index " + index;
    client.execute((c, requestOptions) -> c.indices().create(request, requestOptions), errorMessage);
}
Expand All @@ -181,6 +197,18 @@ public void updateIndexMapping(@Nonnull String indexName,
"Unable to update index mapping " + indexName);
}

/**
 * Fetches the current field mapping of {@code index} and returns it as a nested
 * map. Uses lenient index options, so a missing or closed index does not fail
 * the request itself.
 */
@Override
public Map<String, Object> getIndexMapping(@Nonnull String index) {
final GetMappingsRequest request = new GetMappingsRequest()
.indices(index)
.indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN);

final GetMappingsResponse result = client.execute((c, requestOptions) -> c.indices().getMapping(request, requestOptions),
"Couldn't read mapping of index " + index);

// NOTE(review): with lenient options, mappings().get(index) can be null when
// the index does not exist, which would NPE here — confirm callers only pass
// existing indices.
return result.mappings().get(index).sourceAsMap();
}

@Override
public void updateIndexMetaData(@Nonnull String index, @Nonnull Map<String, Object> metadata, boolean mergeExisting) {
Map<String, Object> metaUpdate = new HashMap<>();
Expand All @@ -194,14 +222,7 @@ public void updateIndexMetaData(@Nonnull String index, @Nonnull Map<String, Obje

@Override
public Map<String, Object> getIndexMetaData(@Nonnull String index) {
final GetMappingsRequest request = new GetMappingsRequest()
.indices(index)
.indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN);

final GetMappingsResponse result = client.execute((c, requestOptions) -> c.indices().getMapping(request, requestOptions),
"Couldn't read mapping of index " + index);

final Object metaData = result.mappings().get(index).sourceAsMap().get("_meta");
final Object metaData = getIndexMapping(index).get("_meta");
//noinspection rawtypes
if (metaData instanceof Map map) {
//noinspection unchecked
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
import com.github.rholder.retry.WaitStrategies;
import com.google.common.base.Preconditions;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import jakarta.annotation.Nonnull;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import jakarta.validation.constraints.NotNull;
Expand Down Expand Up @@ -72,8 +73,6 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import jakarta.annotation.Nonnull;

import javax.annotation.Nullable;
import java.io.BufferedReader;
import java.io.IOException;
Expand Down Expand Up @@ -312,7 +311,7 @@ List<String> getAllIndicesFrom(final URI uri, final String username, final Strin
try (var response = httpClient.newCall(new Request.Builder().url(url).header("Authorization", Credentials.basic(username, password)).build()).execute()) {
if (response.isSuccessful() && response.body() != null) {
// filtering all indices that start with "." as they indicate a system index - we don't want to reindex those
return new BufferedReader(new StringReader(response.body().string())).lines().filter(i -> !i.startsWith(".")).toList();
return new BufferedReader(new StringReader(response.body().string())).lines().filter(i -> !i.startsWith(".")).sorted().toList();
} else {
throw new RuntimeException("Could not read list of indices from " + host);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -125,6 +125,9 @@ private SearchSourceBuilder searchSourceBuilderFrom(ExportMessagesCommand comman
SearchSourceBuilder ssb = new SearchSourceBuilder()
.query(query)
.size(command.chunkSize());
if (!command.exportAllFields()) {
ssb = ssb.fetchSource(command.fieldsInOrder().toArray(new String[]{}), null);
}

return requestStrategy.configure(ssb);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
package org.graylog.storage.opensearch2.views.export;

import com.google.common.collect.ImmutableSet;
import jakarta.annotation.Nonnull;
import org.graylog.plugins.views.search.elasticsearch.ElasticsearchQueryString;
import org.graylog.plugins.views.search.elasticsearch.IndexLookup;
import org.graylog.plugins.views.search.export.ExportException;
Expand All @@ -32,7 +33,6 @@
import org.graylog2.indexer.ElasticsearchException;
import org.graylog2.plugin.Message;
import org.graylog2.plugin.indexer.searches.timeranges.AbsoluteRange;
import jakarta.annotation.Nonnull;
import org.joda.time.DateTimeZone;
import org.junit.After;
import org.junit.Before;
Expand Down Expand Up @@ -192,11 +192,11 @@ public void deliversCompleteLastChunkIfLimitIsReached() {
}

@Test
public void resultsHaveAllMessageFields() {
public void resultsHaveAllMessageFieldsIfFieldsHaveNotBeenExplicitlyChosen() {
importFixture("messages.json");

ExportMessagesCommand command = helper.commandBuilderWithAllTestDefaultStreams()
.fieldsInOrder("timestamp", "message")
.fieldsInOrder(ExportMessagesCommand.ALL_FIELDS)
.build();

LinkedHashSet<SimpleMessageChunk> allChunks = helper.collectChunksFor(command);
Expand Down
4 changes: 2 additions & 2 deletions graylog2-server/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -904,7 +904,7 @@
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>build-helper-maven-plugin</artifactId>
<version>3.5.0</version>
<version>3.6.0</version>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
Expand All @@ -914,7 +914,7 @@
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>3.2.0</version>
<version>3.3.0</version>
</plugin>
</plugins>
</pluginManagement>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ private List<String> transformToList(Object value) {
.filter(Objects::nonNull)
.collect(Collectors.toList());
}
return Collections.singletonList(value.toString());
return value == null ? Collections.emptyList() : Collections.singletonList(value.toString());
}

private static String convertValue(Object o) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,8 @@ public abstract class ExportMessagesCommand {
public static final ElasticsearchQueryString DEFAULT_QUERY = ElasticsearchQueryString.empty();
public static final Set<String> DEFAULT_STREAMS = ImmutableSet.of();
public static final LinkedHashSet<String> DEFAULT_FIELDS = linkedHashSetOf("timestamp", "source", "message");

public static final LinkedHashSet<String> ALL_FIELDS = new LinkedHashSet<>();
public static final int DEFAULT_CHUNK_SIZE = 1000;
public static final DateTimeZone DEFAULT_TIME_ZONE = DateTimeZone.UTC;

Expand All @@ -52,6 +54,10 @@ public static AbsoluteRange defaultTimeRange() {
}
}

/**
 * {@code true} when no explicit field selection was made — i.e.
 * {@code fieldsInOrder()} is empty (the {@code ALL_FIELDS} sentinel), in which
 * case exporters fetch every message field instead of a restricted source list.
 */
public boolean exportAllFields() {
return fieldsInOrder().isEmpty();
}

public abstract AbsoluteRange timeRange();

public abstract ElasticsearchQueryString queryString();
Expand Down
Loading

0 comments on commit 6b5edcc

Please sign in to comment.