Skip to content

Commit

Permalink
Merge branch 'main' into fix-recovered-bytes-cold-cache
Browse files Browse the repository at this point in the history
  • Loading branch information
tlrx committed Jun 23, 2023
2 parents 8b7b5dd + 2798c49 commit 33420aa
Show file tree
Hide file tree
Showing 3,531 changed files with 89,482 additions and 33,863 deletions.
The diff you're trying to view is too large. We only load the first 3000 changed files.
4 changes: 2 additions & 2 deletions .backportrc.json
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
{
"upstream" : "elastic/elasticsearch",
"targetBranchChoices" : [ "main", "8.8", "8.7", "8.6", "8.5", "8.4", "8.3", "8.2", "8.1", "8.0", "7.17", "6.8" ],
"targetBranchChoices" : [ "main", "8.9", "8.8", "8.7", "8.6", "8.5", "8.4", "8.3", "8.2", "8.1", "8.0", "7.17", "6.8" ],
"targetPRLabels" : [ "backport" ],
"branchLabelMapping" : {
"^v8.9.0$" : "main",
"^v8.10.0$" : "main",
"^v(\\d+).(\\d+).\\d+(?:-(?:alpha|beta|rc)\\d+)?$" : "$1.$2"
}
}
9 changes: 9 additions & 0 deletions .buildkite/check-es-serverless.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
# Buildkite pipeline step: validates the current Elasticsearch commit against the
# serverless repository WITHOUT pushing a submodule update there (check-only mode).
steps:
- trigger: elasticsearch-serverless-update-submodule
label: ":elasticsearch: Check elasticsearch changes against serverless"
build:
message: "Validate latest elasticsearch changes"
env:
# The Elasticsearch commit to validate on the serverless side.
ELASTICSEARCH_SUBMODULE_COMMIT: "${BUILDKITE_COMMIT}"
# "false" = validation only; the submodule pointer is not advanced.
UPDATE_SUBMODULE: "false"

4 changes: 2 additions & 2 deletions .buildkite/update-es-serverless.yml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
steps:
- trigger: elasticsearch-serverless
label: ":elasticsearch: Run serverless tests"
- trigger: elasticsearch-serverless-update-submodule
label: ":elasticsearch: Update elasticsearch submodule in serverless"
build:
message: "Elasticsearch submodule update build"
env:
Expand Down
4 changes: 3 additions & 1 deletion .ci/bwcVersions
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,8 @@ BWC_VERSION:
- "8.6.2"
- "8.7.0"
- "8.7.1"
- "8.7.2"
- "8.8.0"
- "8.8.1"
- "8.8.2"
- "8.9.0"
- "8.10.0"
1 change: 1 addition & 0 deletions .ci/dockerOnLinuxExclusions
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ sles-12.5
sles-15.1
sles-15.2
sles-15.3
sles-15.4

# These OSes are deprecated and filtered starting with 8.0.0, but need to be excluded
# for PR checks
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
---
jjbb-template: periodic-trigger-lgc.yml
vars:
- periodic-job: elastic+elasticsearch+%BRANCH%+periodic+concurrent-search-tests
- lgc-job: elastic+elasticsearch+%BRANCH%+intake
- cron: "H H/12 * * *"
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
---
# Jenkins Job Builder definition: periodic job running the full `check` task with
# concurrent search enabled, to catch regressions specific to that mode.
- job:
name: elastic+elasticsearch+%BRANCH%+periodic+concurrent-search-tests
display-name: "elastic / elasticsearch # %BRANCH% - concurrent search tests"
description: "Testing concurrent search enabled for the Elasticsearch %BRANCH% branch.\n"
node: "general-purpose && docker"
builders:
- inject:
properties-file: '.ci/java-versions.properties'
properties-content: |
JAVA_HOME=$HOME/.java/$ES_BUILD_JAVA
JAVA11_HOME=$HOME/.java/java11
- shell: |
#!/usr/local/bin/runbld --redirect-stderr
# NOTE(review): es.concurrent_search is passed twice — once via tests.jvm.argline
# (test JVMs) and once as a Gradle system property; presumably both are needed so
# the flag reaches both the build and the forked test JVMs — confirm with CI owners.
$WORKSPACE/.ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dtests.jvm.argline=-Des.concurrent_search=true -Des.concurrent_search=true check
1 change: 1 addition & 0 deletions .ci/matrix-runtime-javas.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,3 +11,4 @@ ES_RUNTIME_JAVA:
- openjdk18
- openjdk19
- openjdk20
- openjdk21
4 changes: 2 additions & 2 deletions .ci/snapshotBwcVersions
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
BWC_VERSION:
- "7.17.11"
- "8.7.2"
- "8.8.0"
- "8.8.2"
- "8.9.0"
- "8.10.0"
12 changes: 6 additions & 6 deletions README.asciidoc
Original file line number Diff line number Diff line change
@@ -1,14 +1,14 @@
= Elasticsearch

Elasticsearch is the distributed, RESTful search and analytics engine at the
heart of the https://www.elastic.co/products[Elastic Stack]. You can use
Elasticsearch to store, search, and manage data for:
Elasticsearch is a distributed, RESTful search engine optimized for speed and relevance on production-scale workloads. You can use Elasticsearch to perform real-time search over massive datasets for applications including:

* Vector search
* Full-text search
* Logs
* Metrics
* A search backend
* Application monitoring
* Endpoint security
* Application performance monitoring (APM)
* Security logs
\... and more!

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ public static MapperService create(String mappings) {

SimilarityService similarityService = new SimilarityService(indexSettings, null, Map.of());
MapperService mapperService = new MapperService(
() -> TransportVersion.CURRENT,
() -> TransportVersion.current(),
indexSettings,
IndexAnalyzers.of(
Map.of("default", new NamedAnalyzer("default", AnalyzerScope.INDEX, new StandardAnalyzer())),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -144,9 +144,9 @@ public void setUp() throws Exception {
for (int i = 1; i <= numNodes; i++) {
String id = "node" + i;
nb.add(Allocators.newNode(id, Collections.singletonMap("tag", "tag_" + (i % numTags))));
transportVersions.put(id, TransportVersion.CURRENT);
transportVersions.put(id, TransportVersion.current());
}
initialClusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY))
initialClusterState = ClusterState.builder(ClusterName.DEFAULT)
.metadata(metadata)
.routingTable(routingTable)
.nodes(nb)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -159,7 +159,7 @@ public void setUp() throws Exception {
mb.put(indexMetadata, false);
}

ClusterState initialClusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY))
ClusterState initialClusterState = ClusterState.builder(ClusterName.DEFAULT)
.metadata(mb)
.routingTable(routingTable)
.nodes(nb)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,9 +22,9 @@
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MMapDirectory;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.Version;
import org.elasticsearch.common.lucene.search.function.ScriptScoreQuery;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.index.fielddata.FieldDataContext;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
Expand Down Expand Up @@ -148,7 +148,7 @@ public TopDocs benchmark() throws IOException {

private Query scriptScoreQuery(ScoreScript.Factory factory) {
ScoreScript.LeafFactory leafFactory = factory.newFactory(Map.of(), lookup);
return new ScriptScoreQuery(new MatchAllDocsQuery(), null, leafFactory, lookup, null, "test", 0, Version.CURRENT);
return new ScriptScoreQuery(new MatchAllDocsQuery(), null, leafFactory, lookup, null, "test", 0, IndexVersion.CURRENT);
}

private ScoreScript.Factory bareMetalScript() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.core.IOUtils;
Expand Down Expand Up @@ -138,6 +139,7 @@ protected SearchExecutionContext buildSearchExecutionContext() {
0,
0,
mapperService.getIndexSettings(),
ClusterSettings.createBuiltInClusterSettings(),
null,
(ft, fdc) -> ft.fielddataBuilder(fdc).build(new IndexFieldDataCache.None(), new NoneCircuitBreakerService()),
mapperService,
Expand Down Expand Up @@ -170,7 +172,7 @@ protected final MapperService createMapperService(String mappings) {

SimilarityService similarityService = new SimilarityService(indexSettings, null, Map.of());
MapperService mapperService = new MapperService(
() -> TransportVersion.CURRENT,
() -> TransportVersion.current(),
indexSettings,
(type, name) -> Lucene.STANDARD_ANALYZER,
XContentParserConfiguration.EMPTY.withRegistry(new NamedXContentRegistry(ClusterModule.getNamedXWriteables()))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -269,6 +269,11 @@ public IndexSettings getIndexSettings() {
throw new UnsupportedOperationException();
}

@Override
public ClusterSettings getClusterSettings() {
throw new UnsupportedOperationException();
}

@Override
public Optional<SortAndFormats> buildSort(List<SortBuilder<?>> sortBuilders) throws IOException {
throw new UnsupportedOperationException();
Expand All @@ -294,6 +299,11 @@ public void addReleasable(Aggregator aggregator) {
releaseMe.add(aggregator);
}

@Override
public void removeReleasable(Aggregator aggregator) {
releaseMe.remove(aggregator);
}

@Override
public int maxBuckets() {
return Integer.MAX_VALUE;
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,94 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/

package org.elasticsearch.benchmark.spatial;

import org.elasticsearch.geometry.LinearRing;
import org.elasticsearch.geometry.simplify.GeometrySimplifier;
import org.elasticsearch.geometry.simplify.SimplificationErrorCalculator;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;

import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.text.ParseException;
import java.util.concurrent.TimeUnit;
import java.util.zip.GZIPInputStream;

@Fork(1)
@Warmup(iterations = 5)
@Measurement(iterations = 5)
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@State(Scope.Thread)
public class GeometrySimplificationBenchmark {

    /** Name of the simplification error calculator under test (resolved via byName). */
    @Param({ "cartesiantrianglearea", "triangleArea", "triangleheight", "heightandbackpathdistance" })
    public String calculatorName;

    /** Target maximum number of points the simplifier may keep. */
    @Param({ "10", "100", "1000", "10000", "20000" })
    public int maxPoints;

    private GeometrySimplifier<LinearRing> simplifier;

    // Parsed once and cached across benchmark states: loading/parsing the ring is
    // expensive and the input data is identical for every parameter combination.
    private static LinearRing ring;

    @Setup
    public void setup() throws ParseException, IOException {
        SimplificationErrorCalculator calculator = SimplificationErrorCalculator.byName(calculatorName);
        this.simplifier = new GeometrySimplifier.LinearRingSimplifier(maxPoints, calculator);
        if (ring == null) {
            ring = loadRing("us.json.gz");
        }
    }

    @Benchmark
    public void simplify(Blackhole bh) {
        bh.consume(simplifier.simplify(ring));
    }

    /**
     * Parses the GeoJSON resource and returns the ring with the most points
     * across all polygons found (the largest outline).
     */
    private static LinearRing loadRing(@SuppressWarnings("SameParameterValue") String name) throws IOException, ParseException {
        String json = loadJsonFile(name);
        org.apache.lucene.geo.Polygon[] lucenePolygons = org.apache.lucene.geo.Polygon.fromGeoJSON(json);
        LinearRing ring = null;
        for (org.apache.lucene.geo.Polygon lucenePolygon : lucenePolygons) {
            double[] x = lucenePolygon.getPolyLons();
            double[] y = lucenePolygon.getPolyLats();
            // keep the ring with the largest point count seen so far
            if (ring == null || x.length > ring.length()) {
                ring = new LinearRing(x, y);
            }
        }
        return ring;
    }

    /**
     * Reads a classpath resource into a single string, transparently decompressing
     * resources whose name ends in ".gz". Line separators are discarded, which is
     * harmless for the JSON input this benchmark consumes.
     *
     * @throws FileNotFoundException if the resource is not on the classpath
     */
    private static String loadJsonFile(String name) throws IOException {
        InputStream is = GeometrySimplificationBenchmark.class.getResourceAsStream(name);
        if (is == null) {
            throw new FileNotFoundException("classpath resource not found: " + name);
        }
        if (name.endsWith(".gz")) {
            is = new GZIPInputStream(is);
        }
        // try-with-resources: the original leaked the reader (and the underlying,
        // possibly GZIP, stream) — it was never closed.
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) {
            StringBuilder builder = new StringBuilder();
            reader.lines().forEach(builder::append);
            return builder.toString();
        }
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,79 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
* This project is based on a modification of https://github.com/tdunning/t-digest which is licensed under the Apache 2.0 License.
*/

package org.elasticsearch.benchmark.tdigest;

import org.elasticsearch.tdigest.Sort;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Threads;
import org.openjdk.jmh.annotations.Warmup;

import java.util.Arrays;
import java.util.Random;
import java.util.concurrent.TimeUnit;

/** Explores the performance of Sort on pathological input data. */
/** Explores the performance of Sort on pathological input data. */
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@Warmup(iterations = 10, time = 3, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 20, time = 2, timeUnit = TimeUnit.SECONDS)
@Fork(1)
@Threads(1)
@State(Scope.Thread)
public class SortBench {
    private final int size = 100000;
    private final double[] values = new double[size];

    /** 0 = random order, 1 = ascending (pre-sorted), -1 = descending (reverse-sorted). */
    @Param({ "0", "1", "-1" })
    public int sortDirection;

    @Setup
    public void setup() {
        // Fixed seed so every fork/iteration benchmarks the exact same data.
        Random seededRandom = new Random(999983);
        Arrays.setAll(values, index -> seededRandom.nextDouble());
        if (sortDirection != 0) {
            // Ascending for the pre-sorted case; additionally reversed in place
            // for the descending (worst-case) input.
            Arrays.sort(values);
            if (sortDirection < 0) {
                Sort.reverse(values, 0, values.length);
            }
        }
    }

    @Benchmark
    public void quicksort() {
        // Identity permutation rebuilt per invocation: the sort permutes `order`,
        // so its construction cost is deliberately part of the measured work.
        int[] order = new int[size];
        Arrays.setAll(order, index -> index);
        Sort.sort(order, values, null, values.length);
    }
}
Loading

0 comments on commit 33420aa

Please sign in to comment.