Add x-content testing jar (#84775)
This commit adds a jar, separate from the test framework, that provides
utilities for testing x-content related code. The first thing moved
there is the base schema validation test case, which also pulls along
the com.networknt and Jackson dependencies. For now these are direct
dependencies, though we could consider shading them in the future so
that downstream projects are not exposed to them and to potential
version conflicts.
rjernst committed Mar 9, 2022
1 parent eaef161 commit 5f06b5f
Showing 23 changed files with 119 additions and 92 deletions.
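
Downstream projects pick up the new utilities with testImplementation project(":test:x-content") (as x-pack/plugin/core's build.gradle does below) and extend the relocated org.elasticsearch.test.xcontent.AbstractSchemaValidationTestCase. A hypothetical subclass might look like the following sketch; the generic bound, the overridden method names, and the schema file name are assumptions, since the base class body is not part of this diff.

// Hypothetical sketch of a schema validation test built on the relocated base class.
// Only the base class package (org.elasticsearch.test.xcontent) and the transform
// helper imports are confirmed by this commit; the rest is assumed for illustration.
package org.example.schema;

import org.elasticsearch.test.xcontent.AbstractSchemaValidationTestCase;
import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint;

import static org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointTests.randomTransformCheckpoint;

public class TransformCheckpointSchemaSketchTests extends AbstractSchemaValidationTestCase<TransformCheckpoint> {

    @Override
    protected TransformCheckpoint createTestInstance() {
        // Random instances are serialized to x-content and validated against a JSON schema.
        return randomTransformCheckpoint();
    }

    @Override
    protected String getJsonSchemaFileName() {
        return "transform_checkpoint.schema.json"; // assumed schema file name
    }
}
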
1 change: 1 addition & 0 deletions distribution/tools/geoip-cli/build.gradle
@@ -15,4 +15,5 @@ dependencies {
compileOnly project(":libs:elasticsearch-cli")
compileOnly project(":libs:elasticsearch-x-content")
testImplementation project(":test:framework")
testImplementation "org.apache.commons:commons-compress:1.19"
}
@@ -18,7 +18,6 @@
*/
package org.elasticsearch.h3;

import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
import org.elasticsearch.test.ESTestCase;

import java.io.BufferedReader;
@@ -29,6 +28,7 @@
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;
import java.util.zip.GZIPInputStream;

public class CellBoundaryTests extends ESTestCase {

@@ -142,7 +142,7 @@ public void testBc19r14cells() throws Exception {

private void processFile(String file) throws IOException {
InputStream fis = getClass().getResourceAsStream(file + ".gz");
BufferedReader reader = new BufferedReader(new InputStreamReader(new GzipCompressorInputStream(fis), StandardCharsets.UTF_8));
BufferedReader reader = new BufferedReader(new InputStreamReader(new GZIPInputStream(fis), StandardCharsets.UTF_8));
String h3Address = reader.readLine();
while (h3Address != null) {
assertEquals(true, H3.h3IsValid(h3Address));
@@ -18,7 +18,6 @@
*/
package org.elasticsearch.h3;

import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
import org.elasticsearch.test.ESTestCase;

import java.io.BufferedReader;
@@ -27,6 +26,7 @@
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.StringTokenizer;
import java.util.zip.GZIPInputStream;

public class CellCenterTests extends ESTestCase {

@@ -144,7 +144,7 @@ public void testBc19r15centers() throws Exception {

private void processFile(String file) throws IOException {
InputStream fis = getClass().getResourceAsStream(file + ".gz");
BufferedReader reader = new BufferedReader(new InputStreamReader(new GzipCompressorInputStream(fis), StandardCharsets.UTF_8));
BufferedReader reader = new BufferedReader(new InputStreamReader(new GZIPInputStream(fis), StandardCharsets.UTF_8));
String line = reader.readLine();
while (line != null) {
StringTokenizer tokenizer = new StringTokenizer(line, " ");
@@ -8,25 +8,27 @@

package org.elasticsearch.http;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.test.XContentTestUtils;

import java.io.IOException;
import java.util.List;
import java.util.Map;

import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.startsWith;
import static org.hamcrest.collection.IsMapContaining.hasKey;

/**
* Tests that by default the error_trace parameter can be used to show stacktraces
*/
public class DetailedErrorsEnabledIT extends HttpSmokeTestCase {

public void testThatErrorTraceCanBeEnabled() throws IOException {
ObjectMapper mapper = new ObjectMapper();

try {
Request request = new Request("DELETE", "/");
@@ -37,18 +39,20 @@ public void testThatErrorTraceCanBeEnabled() throws IOException {
Response response = e.getResponse();
assertThat(response.getHeader("Content-Type"), containsString("application/json"));

JsonNode jsonNode = mapper.readTree(response.getEntity().getContent());
var jsonNode = XContentTestUtils.createJsonMapView(response.getEntity().getContent());

assertThat(
jsonNode.get("error").get("stack_trace").asText(),
jsonNode.get("error.stack_trace"),
startsWith("org.elasticsearch.action.ActionRequestValidationException: Validation Failed: 1: index / indices is missing")
);

// An ActionRequestValidationException isn't an ElasticsearchException, so when the code tries
// to work out the root cause, all it actually achieves is wrapping the actual exception in
// an ElasticsearchException. At least this proves that the root cause logic is executing.
@SuppressWarnings("unchecked")
Map<String, Object> cause = (Map<String, Object>) jsonNode.<List<Object>>get("error.root_cause").get(0);
assertThat(
jsonNode.get("error").get("root_cause").get(0).get("stack_trace").asText(),
cause.get("stack_trace").toString(),
startsWith("org.elasticsearch.ElasticsearchException$1: Validation Failed: 1: index / indices is missing")
);
}
@@ -63,14 +67,12 @@ public void testThatErrorTraceDefaultsToDisabled() throws IOException {
Response response = e.getResponse();
assertThat(response.getHeader("Content-Type"), containsString("application/json"));

ObjectMapper mapper = new ObjectMapper();
JsonNode jsonNode = mapper.readTree(response.getEntity().getContent());
var jsonNode = XContentTestUtils.createJsonMapView(response.getEntity().getContent());

assertFalse("Unexpected .stack_trace in JSON response", jsonNode.get("error").has("stack_trace"));
assertFalse(
"Unexpected .error.root_cause[0].stack_trace in JSON response",
jsonNode.get("error").get("root_cause").get(0).has("stack_trace")
);
assertThat("Unexpected .stack_trace in JSON response", jsonNode.get("error.stack_track"), nullValue());
@SuppressWarnings("unchecked")
Map<String, Object> cause = (Map<String, Object>) jsonNode.<List<Object>>get("error.root_cause").get(0);
assertThat("Unexpected .error.root_cause[0].stack_trace in JSON response", cause, not(hasKey("stack_trace")));
}
}
}
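
The change above swaps Jackson's ObjectMapper for the test framework's XContentTestUtils.createJsonMapView, which parses the response body and resolves dotted paths against the resulting nested maps and lists. A minimal standalone sketch of that usage, assuming nothing beyond what the diff shows (the class name, JSON literal, and main method are illustrative, not part of this commit):

import org.elasticsearch.test.XContentTestUtils;

import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;

public class JsonMapViewUsageSketch {
    public static void main(String[] args) throws Exception {
        String json = "{\"error\":{\"stack_trace\":\"org.example.Boom: oops\","
            + "\"root_cause\":[{\"stack_trace\":\"org.example.Boom: root\"}]}}";
        // createJsonMapView parses the stream into nested maps/lists, as in the test above.
        var view = XContentTestUtils.createJsonMapView(
            new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8))
        );
        String trace = view.get("error.stack_trace");           // dotted path into nested maps
        List<Object> rootCause = view.get("error.root_cause");  // lists come back as List<Object>
        @SuppressWarnings("unchecked")
        Map<String, Object> cause = (Map<String, Object>) rootCause.get(0);
        System.out.println(trace);
        System.out.println(cause.get("stack_trace"));
    }
}
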
3 changes: 2 additions & 1 deletion settings.gradle
@@ -72,7 +72,8 @@ List projects = [
'test:fixtures:geoip-fixture',
'test:fixtures:url-fixture',
'test:fixtures:nginx-fixture',
'test:logger-usage'
'test:logger-usage',
'test:x-content'
]

/**
30 changes: 0 additions & 30 deletions test/framework/build.gradle
@@ -31,14 +31,6 @@ dependencies {
api 'org.objenesis:objenesis:3.2'

api "org.elasticsearch:mocksocket:${versions.mocksocket}"

// json schema validation dependencies
api "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
api "com.networknt:json-schema-validator:${versions.networknt_json_schema_validator}"
api "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}"
api "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}"
api "org.apache.commons:commons-compress:1.19"
api "org.apache.commons:commons-lang3:${versions.commons_lang3}"
}

// the main files are actually test files, so use the appropriate forbidden api sigs
@@ -54,11 +46,6 @@ tasks.named("dependenciesGraph").configure { enabled = false }
tasks.named("thirdPartyAudit").configure {
ignoreMissingClasses(
// classes are missing
'com.github.luben.zstd.ZstdInputStream',
'com.github.luben.zstd.ZstdOutputStream',
'java.util.jar.Pack200',
'java.util.jar.Pack200$Packer',
'java.util.jar.Pack200$Unpacker',
'javax.servlet.ServletContextEvent',
'javax.servlet.ServletContextListener',
'org.apache.avalon.framework.logger.Logger',
@@ -68,23 +55,6 @@ tasks.named("thirdPartyAudit").configure {
'org.apache.log4j.Level',
'org.apache.log4j.Logger',
'org.apache.log4j.Priority',
'org.brotli.dec.BrotliInputStream',
'org.jcodings.specific.UTF8Encoding',
'org.joni.Matcher',
'org.joni.Regex',
'org.joni.Syntax',
'org.slf4j.Logger',
'org.slf4j.LoggerFactory',
'org.tukaani.xz.DeltaOptions',
'org.tukaani.xz.FilterOptions',
'org.tukaani.xz.LZMA2InputStream',
'org.tukaani.xz.LZMA2Options',
'org.tukaani.xz.LZMAInputStream',
'org.tukaani.xz.LZMAOutputStream',
'org.tukaani.xz.MemoryLimitException',
'org.tukaani.xz.UnsupportedOptionsException',
'org.tukaani.xz.XZ',
'org.tukaani.xz.XZOutputStream',

// mockito
'net.bytebuddy.agent.ByteBuddyAgent',
@@ -8,8 +8,6 @@

package org.elasticsearch.test;

import com.fasterxml.jackson.core.io.JsonStringEncoder;

import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
@@ -41,7 +39,9 @@
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.elasticsearch.xcontent.XContentType;
import org.elasticsearch.xcontent.json.JsonStringEncoder;
import org.elasticsearch.xcontent.json.JsonXContent;
import org.elasticsearch.xcontent.spi.XContentProvider;

import java.io.IOException;
import java.time.Instant;
@@ -646,7 +646,7 @@ protected static Object getRandomValueForFieldName(String fieldName) {
case TEXT_ALIAS_FIELD_NAME:
if (rarely()) {
// unicode in 10% cases
JsonStringEncoder encoder = JsonStringEncoder.getInstance();
JsonStringEncoder encoder = XContentProvider.provider().getJsonStringEncoder();
value = new String(encoder.quoteAsString(randomUnicodeOfLength(10)));
} else {
value = randomAlphaOfLengthBetween(1, 10);
@@ -342,7 +342,7 @@ public <T> T get(String path) {
} else if (context instanceof List) {
context = ((List<Object>) context).get(Integer.parseInt(key));
} else {
throw new IllegalStateException("neither list nor map");
return null; // node does not exist
}
}
return (T) context;
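
The hunk above changes JsonMapView.get to return null instead of throwing IllegalStateException when a dotted path runs into a node that is neither a map nor a list, for example when it descends past a missing key. A rough sketch of the new behavior; the direct JsonMapView(Map) construction is an assumption, since only the get(String) change appears in this diff:

import org.elasticsearch.test.XContentTestUtils.JsonMapView;

import java.util.Map;

public class JsonMapViewNullSketch {
    public static void main(String[] args) {
        // Hypothetical direct construction from a map; only get(String) is confirmed by this diff.
        Map<String, Object> body = Map.of("error", Map.of("reason", "boom"));
        JsonMapView view = new JsonMapView(body);
        String reason = view.get("error.reason");            // "boom"
        Object missing = view.get("error.caused_by.reason"); // previously IllegalStateException, now null
        System.out.println(reason + " / " + missing);
    }
}
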
64 changes: 64 additions & 0 deletions test/x-content/build.gradle
@@ -0,0 +1,64 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/

apply plugin: 'elasticsearch.build'
apply plugin: 'elasticsearch.publish'

dependencies {
api project(":test:framework")
api project(":libs:elasticsearch-x-content")

// json schema validation dependencies
implementation "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
implementation "com.networknt:json-schema-validator:${versions.networknt_json_schema_validator}"
implementation "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}"
implementation "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}"
implementation "org.apache.commons:commons-compress:1.19"
implementation "org.apache.commons:commons-lang3:${versions.commons_lang3}"
}

// the main files are actually test files, so use the appropriate forbidden api sigs
tasks.named('forbiddenApisMain').configure {
replaceSignatureFiles 'jdk-signatures', 'es-all-signatures', 'es-test-signatures'
}

// TODO: should we have licenses for our test deps?
tasks.named("dependencyLicenses").configure { enabled = false }
tasks.named("dependenciesInfo").configure { enabled = false }
tasks.named("dependenciesGraph").configure { enabled = false }

// no tests of the tests, yet
tasks.named("testingConventions").configure { enabled = false }

tasks.named("thirdPartyAudit").configure {
ignoreMissingClasses(
// classes are missing
'com.github.luben.zstd.ZstdInputStream',
'com.github.luben.zstd.ZstdOutputStream',
'java.util.jar.Pack200',
'java.util.jar.Pack200$Packer',
'java.util.jar.Pack200$Unpacker',
'org.brotli.dec.BrotliInputStream',
'org.jcodings.specific.UTF8Encoding',
'org.joni.Matcher',
'org.joni.Regex',
'org.joni.Syntax',
'org.slf4j.Logger',
'org.slf4j.LoggerFactory',
'org.tukaani.xz.DeltaOptions',
'org.tukaani.xz.FilterOptions',
'org.tukaani.xz.LZMA2InputStream',
'org.tukaani.xz.LZMA2Options',
'org.tukaani.xz.LZMAInputStream',
'org.tukaani.xz.LZMAOutputStream',
'org.tukaani.xz.MemoryLimitException',
'org.tukaani.xz.UnsupportedOptionsException',
'org.tukaani.xz.XZ',
'org.tukaani.xz.XZOutputStream',
)
}
@@ -6,7 +6,7 @@
* Side Public License, v 1.
*/

package org.elasticsearch.test;
package org.elasticsearch.test.xcontent;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
@@ -22,6 +22,7 @@

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentType;
1 change: 1 addition & 0 deletions x-pack/plugin/core/build.gradle
@@ -49,6 +49,7 @@ dependencies {
testImplementation project(path: ':modules:lang-mustache')
testImplementation project(path: ':modules:analysis-common')
testImplementation project(":client:rest-high-level")
testImplementation project(":test:x-content")
// Needed for Fips140ProviderVerificationTests
testCompileOnly('org.bouncycastle:bc-fips:1.0.2')

@@ -7,7 +7,7 @@

package org.elasticsearch.xpack.core.transform.action.schema;

import org.elasticsearch.test.AbstractSchemaValidationTestCase;
import org.elasticsearch.test.xcontent.AbstractSchemaValidationTestCase;
import org.elasticsearch.xpack.core.transform.action.GetTransformStatsAction.Response;

import static org.elasticsearch.xpack.core.transform.action.GetTransformStatsActionResponseTests.randomTransformStatsResponse;
@@ -7,7 +7,7 @@

package org.elasticsearch.xpack.core.transform.transforms.schema;

import org.elasticsearch.test.AbstractSchemaValidationTestCase;
import org.elasticsearch.test.xcontent.AbstractSchemaValidationTestCase;
import org.elasticsearch.xpack.core.transform.transforms.TimeRetentionPolicyConfig;

import static org.elasticsearch.xpack.core.transform.transforms.TimeRetentionPolicyConfigTests.randomTimeRetentionPolicyConfig;
@@ -7,7 +7,7 @@

package org.elasticsearch.xpack.core.transform.transforms.schema;

import org.elasticsearch.test.AbstractSchemaValidationTestCase;
import org.elasticsearch.test.xcontent.AbstractSchemaValidationTestCase;
import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointStats;

import static org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointStatsTests.randomTransformCheckpointStats;
@@ -7,7 +7,7 @@

package org.elasticsearch.xpack.core.transform.transforms.schema;

import org.elasticsearch.test.AbstractSchemaValidationTestCase;
import org.elasticsearch.test.xcontent.AbstractSchemaValidationTestCase;
import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint;

import static org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointTests.randomTransformCheckpoint;
@@ -7,7 +7,7 @@

package org.elasticsearch.xpack.core.transform.transforms.schema;

import org.elasticsearch.test.AbstractSchemaValidationTestCase;
import org.elasticsearch.test.xcontent.AbstractSchemaValidationTestCase;
import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointingInfo;

import static org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointingInfoTests.randomTransformCheckpointingInfo;
@@ -7,7 +7,7 @@

package org.elasticsearch.xpack.core.transform.transforms.schema;

import org.elasticsearch.test.AbstractSchemaValidationTestCase;
import org.elasticsearch.test.xcontent.AbstractSchemaValidationTestCase;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.ToXContent.Params;
import org.elasticsearch.xpack.core.transform.TransformField;
@@ -7,7 +7,7 @@

package org.elasticsearch.xpack.core.transform.transforms.schema;

import org.elasticsearch.test.AbstractSchemaValidationTestCase;
import org.elasticsearch.test.xcontent.AbstractSchemaValidationTestCase;
import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats;

import static org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStatsTests.randomStats;
@@ -7,7 +7,7 @@

package org.elasticsearch.xpack.core.transform.transforms.schema;

import org.elasticsearch.test.AbstractSchemaValidationTestCase;
import org.elasticsearch.test.xcontent.AbstractSchemaValidationTestCase;
import org.elasticsearch.xpack.core.transform.transforms.TransformStats;

import static org.elasticsearch.xpack.core.transform.transforms.TransformStatsTests.randomTransformStats;