From 4fbe84edf6a15f3c6409f6fd3104e40cbb90543c Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Wed, 26 Sep 2018 09:33:08 -0700 Subject: [PATCH 01/25] Painless: Cleanup Cache (#33963) * Adds equals/hashcode methods to the Painless* objects within the lookup package. * Changes the caches to use the Painless* objects as keys as well as values. This forces future changes to take into account the appropriate values for caching. * Deletes the existing caching objects in favor of Painless* objects. This removes a pair of bugs that were not taking into account subtle corner cases related to augmented methods and caching. * Uses the Painless* objects to check for equivalency in existing Painless* objects that may have already been added to a PainlessClass. This removes a bug related to return types not being appropriately checked when adding methods. * Cleans up several error messages. --- .../painless/lookup/PainlessCast.java | 47 ++ .../painless/lookup/PainlessClass.java | 26 + .../painless/lookup/PainlessClassBinding.java | 25 + .../painless/lookup/PainlessClassBuilder.java | 26 + .../painless/lookup/PainlessConstructor.java | 23 + .../painless/lookup/PainlessField.java | 22 + .../lookup/PainlessLookupBuilder.java | 464 ++++++------------ .../painless/lookup/PainlessMethod.java | 25 + 8 files changed, 336 insertions(+), 322 deletions(-) diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java index f87f8a134b8c4..98968465d344e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java @@ -19,10 +19,15 @@ package org.elasticsearch.painless.lookup; +import java.util.Objects; + public class PainlessCast { /** Create a standard cast with no boxing/unboxing. 
*/ public static PainlessCast originalTypetoTargetType(Class originalType, Class targetType, boolean explicitCast) { + Objects.requireNonNull(originalType); + Objects.requireNonNull(targetType); + return new PainlessCast(originalType, targetType, explicitCast, null, null, null, null); } @@ -30,6 +35,10 @@ public static PainlessCast originalTypetoTargetType(Class originalType, Class public static PainlessCast unboxOriginalType( Class originalType, Class targetType, boolean explicitCast, Class unboxOriginalType) { + Objects.requireNonNull(originalType); + Objects.requireNonNull(targetType); + Objects.requireNonNull(unboxOriginalType); + return new PainlessCast(originalType, targetType, explicitCast, unboxOriginalType, null, null, null); } @@ -37,6 +46,10 @@ public static PainlessCast unboxOriginalType( public static PainlessCast unboxTargetType( Class originalType, Class targetType, boolean explicitCast, Class unboxTargetType) { + Objects.requireNonNull(originalType); + Objects.requireNonNull(targetType); + Objects.requireNonNull(unboxTargetType); + return new PainlessCast(originalType, targetType, explicitCast, null, unboxTargetType, null, null); } @@ -44,6 +57,10 @@ public static PainlessCast unboxTargetType( public static PainlessCast boxOriginalType( Class originalType, Class targetType, boolean explicitCast, Class boxOriginalType) { + Objects.requireNonNull(originalType); + Objects.requireNonNull(targetType); + Objects.requireNonNull(boxOriginalType); + return new PainlessCast(originalType, targetType, explicitCast, null, null, boxOriginalType, null); } @@ -51,6 +68,10 @@ public static PainlessCast boxOriginalType( public static PainlessCast boxTargetType( Class originalType, Class targetType, boolean explicitCast, Class boxTargetType) { + Objects.requireNonNull(originalType); + Objects.requireNonNull(targetType); + Objects.requireNonNull(boxTargetType); + return new PainlessCast(originalType, targetType, explicitCast, null, null, null, boxTargetType); } @@ 
-73,4 +94,30 @@ private PainlessCast(Class originalType, Class targetType, boolean explici this.boxOriginalType = boxOriginalType; this.boxTargetType = boxTargetType; } + + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } + + if (object == null || getClass() != object.getClass()) { + return false; + } + + PainlessCast that = (PainlessCast)object; + + return explicitCast == that.explicitCast && + Objects.equals(originalType, that.originalType) && + Objects.equals(targetType, that.targetType) && + Objects.equals(unboxOriginalType, that.unboxOriginalType) && + Objects.equals(unboxTargetType, that.unboxTargetType) && + Objects.equals(boxOriginalType, that.boxOriginalType) && + Objects.equals(boxTargetType, that.boxTargetType); + } + + @Override + public int hashCode() { + return Objects.hash(originalType, targetType, explicitCast, unboxOriginalType, unboxTargetType, boxOriginalType, boxTargetType); + } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java index f5d6c97bb2f3e..786b4c6a3b997 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java @@ -22,6 +22,7 @@ import java.lang.invoke.MethodHandle; import java.util.Collections; import java.util.Map; +import java.util.Objects; public final class PainlessClass { @@ -57,4 +58,29 @@ public final class PainlessClass { this.functionalInterfaceMethod = functionalInterfaceMethod; } + + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } + + if (object == null || getClass() != object.getClass()) { + return false; + } + + PainlessClass that = (PainlessClass)object; + + return Objects.equals(constructors, that.constructors) && + Objects.equals(staticMethods, 
that.staticMethods) && + Objects.equals(methods, that.methods) && + Objects.equals(staticFields, that.staticFields) && + Objects.equals(fields, that.fields) && + Objects.equals(functionalInterfaceMethod, that.functionalInterfaceMethod); + } + + @Override + public int hashCode() { + return Objects.hash(constructors, staticMethods, methods, staticFields, fields, functionalInterfaceMethod); + } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBinding.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBinding.java index 3418b2d82446c..0f28830b3d4ab 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBinding.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBinding.java @@ -22,6 +22,7 @@ import java.lang.reflect.Constructor; import java.lang.reflect.Method; import java.util.List; +import java.util.Objects; public class PainlessClassBinding { @@ -38,4 +39,28 @@ public class PainlessClassBinding { this.returnType = returnType; this.typeParameters = typeParameters; } + + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } + + if (object == null || getClass() != object.getClass()) { + return false; + } + + PainlessClassBinding that = (PainlessClassBinding)object; + + return Objects.equals(javaConstructor, that.javaConstructor) && + Objects.equals(javaMethod, that.javaMethod) && + Objects.equals(returnType, that.returnType) && + Objects.equals(typeParameters, that.typeParameters); + } + + @Override + public int hashCode() { + + return Objects.hash(javaConstructor, javaMethod, returnType, typeParameters); + } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java index 92100d1bda0c0..fbf9e45bf16ef 100644 --- 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java @@ -22,6 +22,7 @@ import java.lang.invoke.MethodHandle; import java.util.HashMap; import java.util.Map; +import java.util.Objects; final class PainlessClassBuilder { @@ -57,4 +58,29 @@ PainlessClass build() { return new PainlessClass(constructors, staticMethods, methods, staticFields, fields, getterMethodHandles, setterMethodHandles, functionalInterfaceMethod); } + + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } + + if (object == null || getClass() != object.getClass()) { + return false; + } + + PainlessClassBuilder that = (PainlessClassBuilder)object; + + return Objects.equals(constructors, that.constructors) && + Objects.equals(staticMethods, that.staticMethods) && + Objects.equals(methods, that.methods) && + Objects.equals(staticFields, that.staticFields) && + Objects.equals(fields, that.fields) && + Objects.equals(functionalInterfaceMethod, that.functionalInterfaceMethod); + } + + @Override + public int hashCode() { + return Objects.hash(constructors, staticMethods, methods, staticFields, fields, functionalInterfaceMethod); + } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessConstructor.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessConstructor.java index a3dc6c8122bd6..0f890e88b736b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessConstructor.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessConstructor.java @@ -23,6 +23,7 @@ import java.lang.invoke.MethodType; import java.lang.reflect.Constructor; import java.util.List; +import java.util.Objects; public class PainlessConstructor { @@ -37,4 +38,26 @@ public class PainlessConstructor { this.methodHandle = 
methodHandle; this.methodType = methodType; } + + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } + + if (object == null || getClass() != object.getClass()) { + return false; + } + + PainlessConstructor that = (PainlessConstructor)object; + + return Objects.equals(javaConstructor, that.javaConstructor) && + Objects.equals(typeParameters, that.typeParameters) && + Objects.equals(methodType, that.methodType); + } + + @Override + public int hashCode() { + return Objects.hash(javaConstructor, typeParameters, methodType); + } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessField.java index 9567e97331c7a..72a57159b44fc 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessField.java @@ -21,6 +21,7 @@ import java.lang.invoke.MethodHandle; import java.lang.reflect.Field; +import java.util.Objects; public final class PainlessField { @@ -37,4 +38,25 @@ public final class PainlessField { this.getterMethodHandle = getterMethodHandle; this.setterMethodHandle = setterMethodHandle; } + + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } + + if (object == null || getClass() != object.getClass()) { + return false; + } + + PainlessField that = (PainlessField)object; + + return Objects.equals(javaField, that.javaField) && + Objects.equals(typeParameter, that.typeParameter); + } + + @Override + public int hashCode() { + return Objects.hash(javaField, typeParameter); + } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java index ce451f3dca893..b3bc8580b38f3 100644 --- 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java @@ -20,8 +20,8 @@ package org.elasticsearch.painless.lookup; import org.elasticsearch.painless.spi.Whitelist; -import org.elasticsearch.painless.spi.WhitelistClassBinding; import org.elasticsearch.painless.spi.WhitelistClass; +import org.elasticsearch.painless.spi.WhitelistClassBinding; import org.elasticsearch.painless.spi.WhitelistConstructor; import org.elasticsearch.painless.spi.WhitelistField; import org.elasticsearch.painless.spi.WhitelistMethod; @@ -34,7 +34,6 @@ import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -51,155 +50,10 @@ public final class PainlessLookupBuilder { - private static class PainlessConstructorCacheKey { - - private final Class targetClass; - private final List> typeParameters; - - private PainlessConstructorCacheKey(Class targetClass, List> typeParameters) { - this.targetClass = targetClass; - this.typeParameters = Collections.unmodifiableList(typeParameters); - } - - @Override - public boolean equals(Object object) { - if (this == object) { - return true; - } - - if (object == null || getClass() != object.getClass()) { - return false; - } - - PainlessConstructorCacheKey that = (PainlessConstructorCacheKey)object; - - return Objects.equals(targetClass, that.targetClass) && - Objects.equals(typeParameters, that.typeParameters); - } - - @Override - public int hashCode() { - return Objects.hash(targetClass, typeParameters); - } - } - - private static class PainlessMethodCacheKey { - - private final Class targetClass; - private final String methodName; - private final Class returnType; - private final List> typeParameters; - - private PainlessMethodCacheKey(Class targetClass, String methodName, 
Class returnType, List> typeParameters) { - this.targetClass = targetClass; - this.methodName = methodName; - this.returnType = returnType; - this.typeParameters = Collections.unmodifiableList(typeParameters); - } - - @Override - public boolean equals(Object object) { - if (this == object) { - return true; - } - - if (object == null || getClass() != object.getClass()) { - return false; - } - - PainlessMethodCacheKey that = (PainlessMethodCacheKey)object; - - return Objects.equals(targetClass, that.targetClass) && - Objects.equals(methodName, that.methodName) && - Objects.equals(returnType, that.returnType) && - Objects.equals(typeParameters, that.typeParameters); - } - - @Override - public int hashCode() { - return Objects.hash(targetClass, methodName, returnType, typeParameters); - } - } - - private static class PainlessFieldCacheKey { - - private final Class targetClass; - private final String fieldName; - private final Class typeParameter; - - private PainlessFieldCacheKey(Class targetClass, String fieldName, Class typeParameter) { - this.targetClass = targetClass; - this.fieldName = fieldName; - this.typeParameter = typeParameter; - } - - @Override - public boolean equals(Object object) { - if (this == object) { - return true; - } - - if (object == null || getClass() != object.getClass()) { - return false; - } - - PainlessFieldCacheKey that = (PainlessFieldCacheKey) object; - - return Objects.equals(targetClass, that.targetClass) && - Objects.equals(fieldName, that.fieldName) && - Objects.equals(typeParameter, that.typeParameter); - } - - @Override - public int hashCode() { - return Objects.hash(targetClass, fieldName, typeParameter); - } - } - - private static class PainlessClassBindingCacheKey { - - private final Class targetClass; - private final String methodName; - private final Class methodReturnType; - private final List> methodTypeParameters; - - private PainlessClassBindingCacheKey(Class targetClass, - String methodName, Class returnType, List> 
typeParameters) { - - this.targetClass = targetClass; - this.methodName = methodName; - this.methodReturnType = returnType; - this.methodTypeParameters = Collections.unmodifiableList(typeParameters); - } - - @Override - public boolean equals(Object object) { - if (this == object) { - return true; - } - - if (object == null || getClass() != object.getClass()) { - return false; - } - - PainlessClassBindingCacheKey that = (PainlessClassBindingCacheKey)object; - - return Objects.equals(targetClass, that.targetClass) && - Objects.equals(methodName, that.methodName) && - Objects.equals(methodReturnType, that.methodReturnType) && - Objects.equals(methodTypeParameters, that.methodTypeParameters); - } - - @Override - public int hashCode() { - return Objects.hash(targetClass, methodName, methodReturnType, methodTypeParameters); - } - } - - private static final Map painlessConstructorCache = new HashMap<>(); - private static final Map painlessMethodCache = new HashMap<>(); - private static final Map painlessFieldCache = new HashMap<>(); - private static final Map painlessClassBindingCache = new HashMap<>(); + private static final Map painlessConstructorCache = new HashMap<>(); + private static final Map painlessMethodCache = new HashMap<>(); + private static final Map painlessFieldCache = new HashMap<>(); + private static final Map painlessClassBindingCache = new HashMap<>(); private static final Pattern CLASS_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][._a-zA-Z0-9]*$"); private static final Pattern METHOD_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][_a-zA-Z0-9]*$"); @@ -335,8 +189,7 @@ public void addPainlessClass(Class clazz, boolean importClassName) { throw new IllegalArgumentException("invalid class name [" + canonicalClassName + "]"); } - - Class existingClass = canonicalClassNamesToClasses.get(typeToCanonicalTypeName(clazz)); + Class existingClass = canonicalClassNamesToClasses.get(canonicalClassName); if (existingClass != null && existingClass != clazz) { throw new 
IllegalArgumentException("class [" + canonicalClassName + "] " + @@ -360,22 +213,22 @@ public void addPainlessClass(Class clazz, boolean importClassName) { throw new IllegalArgumentException("must use no_import parameter on class [" + canonicalClassName + "] with no package"); } } else { - Class importedPainlessClass = canonicalClassNamesToClasses.get(importedCanonicalClassName); + Class importedClass = canonicalClassNamesToClasses.get(importedCanonicalClassName); - if (importedPainlessClass == null) { + if (importedClass == null) { if (importClassName) { if (existingPainlessClassBuilder != null) { throw new IllegalArgumentException( - "inconsistent no_import parameters found for class [" + canonicalClassName + "]"); + "inconsistent no_import parameter found for class [" + canonicalClassName + "]"); } canonicalClassNamesToClasses.put(importedCanonicalClassName, clazz); } - } else if (importedPainlessClass != clazz) { + } else if (importedClass != clazz) { throw new IllegalArgumentException("imported class [" + importedCanonicalClassName + "] cannot represent multiple " + - "classes [" + canonicalClassName + "] and [" + typeToCanonicalTypeName(importedPainlessClass) + "]"); + "classes [" + canonicalClassName + "] and [" + typeToCanonicalTypeName(importedClass) + "]"); } else if (importClassName == false) { - throw new IllegalArgumentException("inconsistent no_import parameters found for class [" + canonicalClassName + "]"); + throw new IllegalArgumentException("inconsistent no_import parameter found for class [" + canonicalClassName + "]"); } } } @@ -440,36 +293,32 @@ public void addPainlessConstructor(Class targetClass, List> typePara try { javaConstructor = targetClass.getConstructor(javaTypeParameters.toArray(new Class[typeParametersSize])); } catch (NoSuchMethodException nsme) { - throw new IllegalArgumentException("constructor reflection object " + - "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", nsme); + 
throw new IllegalArgumentException("reflection object not found for constructor " + + "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]", nsme); } - String painlessConstructorKey = buildPainlessConstructorKey(typeParametersSize); - PainlessConstructor painlessConstructor = painlessClassBuilder.constructors.get(painlessConstructorKey); - - if (painlessConstructor == null) { - MethodHandle methodHandle; - - try { - methodHandle = MethodHandles.publicLookup().in(targetClass).unreflectConstructor(javaConstructor); - } catch (IllegalAccessException iae) { - throw new IllegalArgumentException("constructor method handle " + - "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", iae); - } + MethodHandle methodHandle; - MethodType methodType = methodHandle.type(); + try { + methodHandle = MethodHandles.publicLookup().in(targetClass).unreflectConstructor(javaConstructor); + } catch (IllegalAccessException iae) { + throw new IllegalArgumentException("method handle not found for constructor " + + "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]", iae); + } - painlessConstructor = painlessConstructorCache.computeIfAbsent( - new PainlessConstructorCacheKey(targetClass, typeParameters), - key -> new PainlessConstructor(javaConstructor, typeParameters, methodHandle, methodType) - ); + MethodType methodType = methodHandle.type(); - painlessClassBuilder.constructors.put(painlessConstructorKey, painlessConstructor); - } else if (painlessConstructor.typeParameters.equals(typeParameters) == false){ - throw new IllegalArgumentException("cannot have constructors " + + String painlessConstructorKey = buildPainlessConstructorKey(typeParametersSize); + PainlessConstructor existingPainlessConstructor = painlessClassBuilder.constructors.get(painlessConstructorKey); + PainlessConstructor newPainlessConstructor = new PainlessConstructor(javaConstructor, typeParameters, 
methodHandle, methodType); + + if (existingPainlessConstructor == null) { + newPainlessConstructor = painlessConstructorCache.computeIfAbsent(newPainlessConstructor, key -> key); + painlessClassBuilder.constructors.put(painlessConstructorKey, newPainlessConstructor); + } else if (newPainlessConstructor.equals(existingPainlessConstructor) == false){ + throw new IllegalArgumentException("cannot add constructors with the same arity but are not equivalent for constructors " + "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "] and " + - "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(painlessConstructor.typeParameters) + "] " + - "with the same arity and different type parameters"); + "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(existingPainlessConstructor.typeParameters) + "]"); } } @@ -578,8 +427,8 @@ public void addPainlessMethod(Class targetClass, Class augmentedClass, try { javaMethod = targetClass.getMethod(methodName, javaTypeParameters.toArray(new Class[typeParametersSize])); } catch (NoSuchMethodException nsme) { - throw new IllegalArgumentException("method reflection object [[" + targetCanonicalClassName + "], " + - "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", nsme); + throw new IllegalArgumentException("reflection object not found for method [[" + targetCanonicalClassName + "], " + + "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "]", nsme); } } else { try { @@ -591,9 +440,9 @@ public void addPainlessMethod(Class targetClass, Class augmentedClass, "[" + typeToCanonicalTypeName(augmentedClass) + "] must be static"); } } catch (NoSuchMethodException nsme) { - throw new IllegalArgumentException("method reflection object [[" + targetCanonicalClassName + "], " + - "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found " + - "with augmented target class [" + 
typeToCanonicalTypeName(augmentedClass) + "]", nsme); + throw new IllegalArgumentException("reflection object not found for method " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] " + + "with augmented class [" + typeToCanonicalTypeName(augmentedClass) + "]", nsme); } } @@ -604,78 +453,53 @@ public void addPainlessMethod(Class targetClass, Class augmentedClass, typesToCanonicalTypeNames(typeParameters) + "]"); } - String painlessMethodKey = buildPainlessMethodKey(methodName, typeParametersSize); - - if (augmentedClass == null && Modifier.isStatic(javaMethod.getModifiers())) { - PainlessMethod painlessMethod = painlessClassBuilder.staticMethods.get(painlessMethodKey); - - if (painlessMethod == null) { - MethodHandle methodHandle; - - try { - methodHandle = MethodHandles.publicLookup().in(targetClass).unreflect(javaMethod); - } catch (IllegalAccessException iae) { - throw new IllegalArgumentException("static method handle [[" + targetClass.getCanonicalName() + "], " + - "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", iae); - } + MethodHandle methodHandle; - MethodType methodType = methodHandle.type(); - - painlessMethod = painlessMethodCache.computeIfAbsent( - new PainlessMethodCacheKey(targetClass, methodName, returnType, typeParameters), - key -> new PainlessMethod(javaMethod, targetClass, returnType, typeParameters, methodHandle, methodType)); - - painlessClassBuilder.staticMethods.put(painlessMethodKey, painlessMethod); - } else if (painlessMethod.returnType == returnType && painlessMethod.typeParameters.equals(typeParameters) == false) { - throw new IllegalArgumentException("cannot have static methods " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + - "[" + typeToCanonicalTypeName(returnType) + "], " + - typesToCanonicalTypeNames(typeParameters) + "] and " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + - "[" + 
typeToCanonicalTypeName(painlessMethod.returnType) + "], " + - typesToCanonicalTypeNames(painlessMethod.typeParameters) + "] " + - "with the same arity and different return type or type parameters"); + if (augmentedClass == null) { + try { + methodHandle = MethodHandles.publicLookup().in(targetClass).unreflect(javaMethod); + } catch (IllegalAccessException iae) { + throw new IllegalArgumentException("method handle not found for method " + + "[[" + targetClass.getCanonicalName() + "], [" + methodName + "], " + + typesToCanonicalTypeNames(typeParameters) + "]", iae); } } else { - PainlessMethod painlessMethod = painlessClassBuilder.methods.get(painlessMethodKey); - - if (painlessMethod == null) { - MethodHandle methodHandle; + try { + methodHandle = MethodHandles.publicLookup().in(augmentedClass).unreflect(javaMethod); + } catch (IllegalAccessException iae) { + throw new IllegalArgumentException("method handle not found for method " + + "[[" + targetClass.getCanonicalName() + "], [" + methodName + "], " + + typesToCanonicalTypeNames(typeParameters) + "]" + + "with augmented class [" + typeToCanonicalTypeName(augmentedClass) + "]", iae); + } + } - if (augmentedClass == null) { - try { - methodHandle = MethodHandles.publicLookup().in(targetClass).unreflect(javaMethod); - } catch (IllegalAccessException iae) { - throw new IllegalArgumentException("method handle [[" + targetClass.getCanonicalName() + "], " + - "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", iae); - } - } else { - try { - methodHandle = MethodHandles.publicLookup().in(augmentedClass).unreflect(javaMethod); - } catch (IllegalAccessException iae) { - throw new IllegalArgumentException("method handle [[" + targetClass.getCanonicalName() + "], " + - "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found " + - "with augmented target class [" + typeToCanonicalTypeName(augmentedClass) + "]", iae); - } - } + MethodType methodType = 
methodHandle.type(); - MethodType methodType = methodHandle.type(); - - painlessMethod = painlessMethodCache.computeIfAbsent( - new PainlessMethodCacheKey(targetClass, methodName, returnType, typeParameters), - key -> new PainlessMethod(javaMethod, targetClass, returnType, typeParameters, methodHandle, methodType)); - - painlessClassBuilder.methods.put(painlessMethodKey, painlessMethod); - } else if (painlessMethod.returnType == returnType && painlessMethod.typeParameters.equals(typeParameters) == false) { - throw new IllegalArgumentException("cannot have methods " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + - "[" + typeToCanonicalTypeName(returnType) + "], " + - typesToCanonicalTypeNames(typeParameters) + "] and " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + - "[" + typeToCanonicalTypeName(painlessMethod.returnType) + "], " + - typesToCanonicalTypeNames(painlessMethod.typeParameters) + "] " + - "with the same arity and different return type or type parameters"); + boolean isStatic = augmentedClass == null && Modifier.isStatic(javaMethod.getModifiers()); + String painlessMethodKey = buildPainlessMethodKey(methodName, typeParametersSize); + PainlessMethod existingPainlessMethod = isStatic ? 
+ painlessClassBuilder.staticMethods.get(painlessMethodKey) : + painlessClassBuilder.methods.get(painlessMethodKey); + PainlessMethod newPainlessMethod = + new PainlessMethod(javaMethod, targetClass, returnType, typeParameters, methodHandle, methodType); + + if (existingPainlessMethod == null) { + newPainlessMethod = painlessMethodCache.computeIfAbsent(newPainlessMethod, key -> key); + + if (isStatic) { + painlessClassBuilder.staticMethods.put(painlessMethodKey, newPainlessMethod); + } else { + painlessClassBuilder.methods.put(painlessMethodKey, newPainlessMethod); } + } else if (newPainlessMethod.equals(existingPainlessMethod) == false) { + throw new IllegalArgumentException("cannot add methods with the same name and arity but are not equivalent for methods " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + + "[" + typeToCanonicalTypeName(returnType) + "], " + + typesToCanonicalTypeNames(typeParameters) + "] and " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + + "[" + typeToCanonicalTypeName(existingPainlessMethod.returnType) + "], " + + typesToCanonicalTypeNames(existingPainlessMethod.typeParameters) + "]"); } } @@ -687,7 +511,8 @@ public void addPainlessField(String targetCanonicalClassName, String fieldName, Class targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName); if (targetClass == null) { - throw new IllegalArgumentException("class [" + targetCanonicalClassName + "] not found"); + throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found for field " + + "[[" + targetCanonicalClassName + "], [" + fieldName + "], [" + canonicalTypeNameParameter + "]]"); } Class typeParameter = canonicalTypeNameToType(canonicalTypeNameParameter); @@ -721,7 +546,8 @@ public void addPainlessField(Class targetClass, String fieldName, Class ty PainlessClassBuilder painlessClassBuilder = classesToPainlessClassBuilders.get(targetClass); if (painlessClassBuilder == null) { - throw new 
IllegalArgumentException("class [" + targetCanonicalClassName + "] not found"); + throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found for field " + + "[[" + targetCanonicalClassName + "], [" + fieldName + "], [" + typeToCanonicalTypeName(typeParameter) + "]]"); } if (isValidType(typeParameter) == false) { @@ -735,7 +561,7 @@ public void addPainlessField(Class targetClass, String fieldName, Class ty javaField = targetClass.getField(fieldName); } catch (NoSuchFieldException nsme) { throw new IllegalArgumentException( - "field reflection object [[" + targetCanonicalClassName + "], [" + fieldName + "] not found", nsme); + "reflection object not found for field [[" + targetCanonicalClassName + "], [" + fieldName + "]", nsme); } if (javaField.getType() != typeToJavaType(typeParameter)) { @@ -760,20 +586,18 @@ public void addPainlessField(Class targetClass, String fieldName, Class ty throw new IllegalArgumentException("static field [[" + targetCanonicalClassName + "], [" + fieldName + "]] must be final"); } - PainlessField painlessField = painlessClassBuilder.staticFields.get(painlessFieldKey); + PainlessField existingPainlessField = painlessClassBuilder.staticFields.get(painlessFieldKey); + PainlessField newPainlessField = new PainlessField(javaField, typeParameter, methodHandleGetter, null); - if (painlessField == null) { - painlessField = painlessFieldCache.computeIfAbsent( - new PainlessFieldCacheKey(targetClass, fieldName, typeParameter), - key -> new PainlessField(javaField, typeParameter, methodHandleGetter, null)); - - painlessClassBuilder.staticFields.put(painlessFieldKey, painlessField); - } else if (painlessField.typeParameter != typeParameter) { - throw new IllegalArgumentException("cannot have static fields " + + if (existingPainlessField == null) { + newPainlessField = painlessFieldCache.computeIfAbsent(newPainlessField, key -> key); + painlessClassBuilder.staticFields.put(painlessFieldKey, newPainlessField); + } 
else if (newPainlessField.equals(existingPainlessField) == false) { + throw new IllegalArgumentException("cannot add fields with the same name but are not equivalent for fields " + "[[" + targetCanonicalClassName + "], [" + fieldName + "], [" + typeToCanonicalTypeName(typeParameter) + "] and " + - "[[" + targetCanonicalClassName + "], [" + painlessField.javaField.getName() + "], " + - typeToCanonicalTypeName(painlessField.typeParameter) + "] " + + "[[" + targetCanonicalClassName + "], [" + existingPainlessField.javaField.getName() + "], " + + typeToCanonicalTypeName(existingPainlessField.typeParameter) + "] " + "with the same name and different type parameters"); } } else { @@ -786,35 +610,41 @@ public void addPainlessField(Class targetClass, String fieldName, Class ty "setter method handle not found for field [[" + targetCanonicalClassName + "], [" + fieldName + "]]"); } - PainlessField painlessField = painlessClassBuilder.fields.get(painlessFieldKey); - - if (painlessField == null) { - painlessField = painlessFieldCache.computeIfAbsent( - new PainlessFieldCacheKey(targetClass, painlessFieldKey, typeParameter), - key -> new PainlessField(javaField, typeParameter, methodHandleGetter, methodHandleSetter)); + PainlessField existingPainlessField = painlessClassBuilder.fields.get(painlessFieldKey); + PainlessField newPainlessField = new PainlessField(javaField, typeParameter, methodHandleGetter, methodHandleSetter); - painlessClassBuilder.fields.put(fieldName, painlessField); - } else if (painlessField.typeParameter != typeParameter) { - throw new IllegalArgumentException("cannot have fields " + + if (existingPainlessField == null) { + newPainlessField = painlessFieldCache.computeIfAbsent(newPainlessField, key -> key); + painlessClassBuilder.fields.put(painlessFieldKey, newPainlessField); + } else if (newPainlessField.equals(existingPainlessField) == false) { + throw new IllegalArgumentException("cannot add fields with the same name but are not equivalent for fields " 
+ "[[" + targetCanonicalClassName + "], [" + fieldName + "], [" + typeToCanonicalTypeName(typeParameter) + "] and " + - "[[" + targetCanonicalClassName + "], [" + painlessField.javaField.getName() + "], " + - typeToCanonicalTypeName(painlessField.typeParameter) + "] " + + "[[" + targetCanonicalClassName + "], [" + existingPainlessField.javaField.getName() + "], " + + typeToCanonicalTypeName(existingPainlessField.typeParameter) + "] " + "with the same name and different type parameters"); } } } - public void addImportedPainlessMethod(ClassLoader classLoader, String targetCanonicalClassName, + public void addImportedPainlessMethod(ClassLoader classLoader, String targetJavaClassName, String methodName, String returnCanonicalTypeName, List canonicalTypeNameParameters) { Objects.requireNonNull(classLoader); - Objects.requireNonNull(targetCanonicalClassName); + Objects.requireNonNull(targetJavaClassName); Objects.requireNonNull(methodName); Objects.requireNonNull(returnCanonicalTypeName); Objects.requireNonNull(canonicalTypeNameParameters); - Class targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName); + Class targetClass; + + try { + targetClass = Class.forName(targetJavaClassName, true, classLoader); + } catch (ClassNotFoundException cnfe) { + throw new IllegalArgumentException("class [" + targetJavaClassName + "] not found", cnfe); + } + + String targetCanonicalClassName = typeToCanonicalTypeName(targetClass); if (targetClass == null) { throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found for imported method " + @@ -913,35 +743,33 @@ public void addImportedPainlessMethod(Class targetClass, String methodName, C throw new IllegalArgumentException("imported method and class binding cannot have the same name [" + methodName + "]"); } - PainlessMethod importedPainlessMethod = painlessMethodKeysToImportedPainlessMethods.get(painlessMethodKey); - - if (importedPainlessMethod == null) { - MethodHandle methodHandle; + 
MethodHandle methodHandle; - try { - methodHandle = MethodHandles.publicLookup().in(targetClass).unreflect(javaMethod); - } catch (IllegalAccessException iae) { - throw new IllegalArgumentException("imported method handle [[" + targetClass.getCanonicalName() + "], " + - "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", iae); - } + try { + methodHandle = MethodHandles.publicLookup().in(targetClass).unreflect(javaMethod); + } catch (IllegalAccessException iae) { + throw new IllegalArgumentException("imported method handle [[" + targetClass.getCanonicalName() + "], " + + "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", iae); + } - MethodType methodType = methodHandle.type(); + MethodType methodType = methodHandle.type(); - importedPainlessMethod = painlessMethodCache.computeIfAbsent( - new PainlessMethodCacheKey(targetClass, methodName, returnType, typeParameters), - key -> new PainlessMethod(javaMethod, targetClass, returnType, typeParameters, methodHandle, methodType)); + PainlessMethod existingImportedPainlessMethod = painlessMethodKeysToImportedPainlessMethods.get(painlessMethodKey); + PainlessMethod newImportedPainlessMethod = + new PainlessMethod(javaMethod, targetClass, returnType, typeParameters, methodHandle, methodType); - painlessMethodKeysToImportedPainlessMethods.put(painlessMethodKey, importedPainlessMethod); - } else if (importedPainlessMethod.returnType == returnType && - importedPainlessMethod.typeParameters.equals(typeParameters) == false) { - throw new IllegalArgumentException("cannot have imported methods " + + if (existingImportedPainlessMethod == null) { + newImportedPainlessMethod = painlessMethodCache.computeIfAbsent(newImportedPainlessMethod, key -> key); + painlessMethodKeysToImportedPainlessMethods.put(painlessMethodKey, newImportedPainlessMethod); + } else if (newImportedPainlessMethod.equals(existingImportedPainlessMethod) == false) { + throw new 
IllegalArgumentException("cannot add imported methods with the same name and arity " + + "but are not equivalent for methods " + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + "[" + typeToCanonicalTypeName(returnType) + "], " + typesToCanonicalTypeNames(typeParameters) + "] and " + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + - "[" + typeToCanonicalTypeName(importedPainlessMethod.returnType) + "], " + - typesToCanonicalTypeNames(importedPainlessMethod.typeParameters) + "] " + - "with the same arity and different return type or type parameters"); + "[" + typeToCanonicalTypeName(existingImportedPainlessMethod.returnType) + "], " + + typesToCanonicalTypeNames(existingImportedPainlessMethod.typeParameters) + "]"); } } @@ -987,7 +815,6 @@ public void addPainlessClassBinding(ClassLoader classLoader, String targetJavaCl } public void addPainlessClassBinding(Class targetClass, String methodName, Class returnType, List> typeParameters) { - Objects.requireNonNull(targetClass); Objects.requireNonNull(methodName); Objects.requireNonNull(returnType); @@ -1100,31 +927,24 @@ public void addPainlessClassBinding(Class targetClass, String methodName, Cla throw new IllegalArgumentException("class binding and imported method cannot have the same name [" + methodName + "]"); } - PainlessClassBinding painlessClassBinding = painlessMethodKeysToPainlessClassBindings.get(painlessMethodKey); - - if (painlessClassBinding == null) { - Constructor finalJavaConstructor = javaConstructor; - Method finalJavaMethod = javaMethod; - - painlessClassBinding = painlessClassBindingCache.computeIfAbsent( - new PainlessClassBindingCacheKey(targetClass, methodName, returnType, typeParameters), - key -> new PainlessClassBinding(finalJavaConstructor, finalJavaMethod, returnType, typeParameters)); + PainlessClassBinding existingPainlessClassBinding = painlessMethodKeysToPainlessClassBindings.get(painlessMethodKey); + PainlessClassBinding newPainlessClassBinding = + new 
PainlessClassBinding(javaConstructor, javaMethod, returnType, typeParameters); - painlessMethodKeysToPainlessClassBindings.put(painlessMethodKey, painlessClassBinding); - } else if (painlessClassBinding.javaConstructor.equals(javaConstructor) == false || - painlessClassBinding.javaMethod.equals(javaMethod) == false || - painlessClassBinding.returnType != returnType || - painlessClassBinding.typeParameters.equals(typeParameters) == false) { - throw new IllegalArgumentException("cannot have class bindings " + + if (existingPainlessClassBinding == null) { + newPainlessClassBinding = painlessClassBindingCache.computeIfAbsent(newPainlessClassBinding, key -> key); + painlessMethodKeysToPainlessClassBindings.put(painlessMethodKey, newPainlessClassBinding); + } else if (newPainlessClassBinding.equals(existingPainlessClassBinding) == false) { + throw new IllegalArgumentException("cannot add class bindings with the same name and arity " + + "but are not equivalent for methods " + "[[" + targetCanonicalClassName + "], " + "[" + methodName + "], " + "[" + typeToCanonicalTypeName(returnType) + "], " + typesToCanonicalTypeNames(typeParameters) + "] and " + "[[" + targetCanonicalClassName + "], " + "[" + methodName + "], " + - "[" + typeToCanonicalTypeName(painlessClassBinding.returnType) + "], " + - typesToCanonicalTypeNames(painlessClassBinding.typeParameters) + "] and " + - "with the same name and arity but different constructors or methods"); + "[" + typeToCanonicalTypeName(existingPainlessClassBinding.returnType) + "], " + + typesToCanonicalTypeNames(existingPainlessClassBinding.typeParameters) + "]"); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java index 89462170ae5e8..ce10d7a1b891c 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java +++ 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java @@ -24,6 +24,7 @@ import java.lang.reflect.Method; import java.util.Collections; import java.util.List; +import java.util.Objects; public class PainlessMethod { @@ -44,4 +45,28 @@ public PainlessMethod(Method javaMethod, Class targetClass, Class returnTy this.methodHandle = methodHandle; this.methodType = methodType; } + + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } + + if (object == null || getClass() != object.getClass()) { + return false; + } + + PainlessMethod that = (PainlessMethod)object; + + return Objects.equals(javaMethod, that.javaMethod) && + Objects.equals(targetClass, that.targetClass) && + Objects.equals(returnType, that.returnType) && + Objects.equals(typeParameters, that.typeParameters) && + Objects.equals(methodType, that.methodType); + } + + @Override + public int hashCode() { + return Objects.hash(javaMethod, targetClass, returnType, typeParameters, methodType); + } } From de8bfb908f6fe1706e66bdf0d2e65ac1539cab59 Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Wed, 26 Sep 2018 09:36:41 -0700 Subject: [PATCH 02/25] Delegate wildcard query creation to MappedFieldType. (#34062) * Delegate wildcard query creation to MappedFieldType. * Disallow wildcard queries on collation fields. * Disallow wildcard queries on non-string fields. 
--- .../ICUCollationKeywordFieldMapper.java | 12 ++++++---- .../index/mapper/CollationFieldTypeTests.java | 8 +++++++ .../index/mapper/IndexFieldMapper.java | 11 +++++++++- .../index/mapper/MappedFieldType.java | 4 ++++ .../index/mapper/StringFieldType.java | 13 +++++++++++ .../index/query/WildcardQueryBuilder.java | 22 +++++++++---------- 6 files changed, 53 insertions(+), 17 deletions(-) diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java index 055c792282267..c44152dd5013c 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java @@ -23,7 +23,6 @@ import com.ibm.icu.text.RawCollationKey; import com.ibm.icu.text.RuleBasedCollator; import com.ibm.icu.util.ULocale; - import org.apache.lucene.document.Field; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.IndexOptions; @@ -159,18 +158,23 @@ protected BytesRef indexedValueForSearch(Object value) { @Override public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { - throw new UnsupportedOperationException(); + throw new UnsupportedOperationException("[fuzzy] queries are not supported on [" + CONTENT_TYPE + "] fields."); } @Override public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, QueryShardContext context) { - throw new UnsupportedOperationException(); + throw new UnsupportedOperationException("[prefix] queries are not supported on [" + CONTENT_TYPE + "] fields."); + } + + @Override + public Query wildcardQuery(String value, QueryShardContext context) { + throw new UnsupportedOperationException("[wildcard] queries are not supported on [" + CONTENT_TYPE + "] 
fields."); } @Override public Query regexpQuery(String value, int flags, int maxDeterminizedStates, MultiTermQuery.RewriteMethod method, QueryShardContext context) { - throw new UnsupportedOperationException(); + throw new UnsupportedOperationException("[regexp] queries are not supported on [" + CONTENT_TYPE + "] fields."); } public static DocValueFormat COLLATE_FORMAT = new DocValueFormat() { diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/CollationFieldTypeTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/CollationFieldTypeTests.java index 71d8f25bf9f3b..f90971412358d 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/CollationFieldTypeTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/CollationFieldTypeTests.java @@ -121,6 +121,14 @@ public void testPrefixQuery() { () -> ft.prefixQuery("prefix", null, null)); } + public void testWildcardQuery() { + MappedFieldType ft = createDefaultFieldType(); + ft.setName("field"); + ft.setIndexOptions(IndexOptions.DOCS); + expectThrows(UnsupportedOperationException.class, + () -> ft.wildcardQuery("foo*", null)); + } + public void testRangeQuery() { MappedFieldType ft = createDefaultFieldType(); ft.setName("field"); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java index bb048ab9afac4..fac00907980fd 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java @@ -150,8 +150,17 @@ public Query termsQuery(List values, QueryShardContext context) { + " vs. 
" + values); } + @Override + public Query wildcardQuery(String value, QueryShardContext context) { + if (isSameIndex(value, context.getFullyQualifiedIndex().getName())) { + return Queries.newMatchAllQuery(); + } else { + return Queries.newMatchNoDocsQuery("Index didn't match. Index queried: " + context.index().getName() + " vs. " + value); + } + } + private boolean isSameIndex(Object value, String indexName) { - String pattern = value instanceof BytesRef ? pattern = ((BytesRef) value).utf8ToString() : value.toString(); + String pattern = value instanceof BytesRef ? ((BytesRef) value).utf8ToString() : value.toString(); return Regex.simpleMatch(pattern, indexName); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index 4ce7af9f09fb0..5a2c8991e0aaa 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -345,6 +345,10 @@ public Query prefixQuery(String value, @Nullable MultiTermQuery.RewriteMethod me throw new QueryShardException(context, "Can only use prefix queries on keyword and text fields - not on [" + name + "] which is of type [" + typeName() + "]"); } + public Query wildcardQuery(String value, QueryShardContext context) { + throw new QueryShardException(context, "Can only use wildcard queries on keyword and text fields - not on [" + name + "] which is of type [" + typeName() + "]"); + } + public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, QueryShardContext context) { throw new QueryShardException(context, "Can only use regexp queries on keyword and text fields - not on [" + name + "] which is of type [" + typeName() + "]"); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java 
b/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java index 37834b93a1e0f..857c588717b2a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java @@ -22,6 +22,8 @@ import java.util.List; import org.apache.lucene.index.Term; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.MultiTermQuery; @@ -29,6 +31,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.RegexpQuery; import org.apache.lucene.search.TermRangeQuery; +import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.unit.Fuzziness; @@ -74,6 +77,16 @@ public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, Quer return query; } + @Override + public Query wildcardQuery(String value, QueryShardContext context) { + Query termQuery = termQuery(value, context); + if (termQuery instanceof MatchNoDocsQuery || termQuery instanceof MatchAllDocsQuery) { + return termQuery; + } + Term term = MappedFieldType.extractTerm(termQuery); + return new WildcardQuery(term); + } + @Override public Query regexpQuery(String value, int flags, int maxDeterminizedStates, MultiTermQuery.RewriteMethod method, QueryShardContext context) { diff --git a/server/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java index 2136e030dbdb1..489286d309472 100644 --- a/server/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java @@ -20,8 +20,6 @@ package org.elasticsearch.index.query; import 
org.apache.lucene.index.Term; -import org.apache.lucene.search.MatchAllDocsQuery; -import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.WildcardQuery; @@ -185,20 +183,20 @@ public static WildcardQueryBuilder fromXContent(XContentParser parser) throws IO @Override protected Query doToQuery(QueryShardContext context) throws IOException { MappedFieldType fieldType = context.fieldMapper(fieldName); - Term term; + + Query query; if (fieldType == null) { - term = new Term(fieldName, BytesRefs.toBytesRef(value)); + Term term = new Term(fieldName, BytesRefs.toBytesRef(value)); + query = new WildcardQuery(term); } else { - Query termQuery = fieldType.termQuery(value, context); - if (termQuery instanceof MatchNoDocsQuery || termQuery instanceof MatchAllDocsQuery) { - return termQuery; - } - term = MappedFieldType.extractTerm(termQuery); + query = fieldType.wildcardQuery(value, context); } - WildcardQuery query = new WildcardQuery(term); - MultiTermQuery.RewriteMethod rewriteMethod = QueryParsers.parseRewriteMethod(rewrite, null, LoggingDeprecationHandler.INSTANCE); - QueryParsers.setRewriteMethod(query, rewriteMethod); + if (query instanceof MultiTermQuery) { + MultiTermQuery.RewriteMethod rewriteMethod = QueryParsers.parseRewriteMethod( + rewrite, null, LoggingDeprecationHandler.INSTANCE); + QueryParsers.setRewriteMethod((MultiTermQuery) query, rewriteMethod); + } return query; } From 80c5d30f30c311544844187d412498990ba5ed5a Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Wed, 26 Sep 2018 14:24:31 -0400 Subject: [PATCH 03/25] XContentBuilder to handle BigInteger and BigDecimal (#32888) Although we allow to index BigInteger and BigDecimal into a keyword field, source filtering on these fields would fail as XContentBuilder was not able to deserialize BigInteger and BigDecimal to json. 
This modifies XContentBuilder to allow to handle BigInteger and BigDecimal. Closes #32395 --- .../common/xcontent/XContentBuilder.java | 80 ++++++++++++++++++- .../common/xcontent/XContentGenerator.java | 10 +++ .../xcontent/json/JsonXContentGenerator.java | 25 ++++++ .../test/search/10_source_filtering.yml | 10 +++ .../common/xcontent/BaseXContentTestCase.java | 31 +++++++ 5 files changed, 155 insertions(+), 1 deletion(-) diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java index eae5e48a557f3..51a4f86a0d3b2 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java @@ -25,6 +25,8 @@ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.math.BigDecimal; +import java.math.BigInteger; import java.nio.file.Path; import java.time.ZonedDateTime; import java.util.Arrays; @@ -103,7 +105,8 @@ public static XContentBuilder builder(XContent xContent, Set includes, S writers.put(ZonedDateTime.class, (b, v) -> b.value(v.toString())); writers.put(Calendar.class, XContentBuilder::timeValue); writers.put(GregorianCalendar.class, XContentBuilder::timeValue); - + writers.put(BigInteger.class, (b, v) -> b.value((BigInteger) v)); + writers.put(BigDecimal.class, (b, v) -> b.value((BigDecimal) v)); Map, HumanReadableTransformer> humanReadableTransformer = new HashMap<>(); Map, Function> dateTransformers = new HashMap<>(); @@ -546,6 +549,81 @@ public XContentBuilder value(short value) throws IOException { return this; } + //////////////////////////////////////////////////////////////////////////// + // BigInteger + ////////////////////////////////// + + public XContentBuilder field(String name, BigInteger value) throws IOException { + if (value == null) { + return nullField(name); + } + 
ensureNameNotNull(name); + generator.writeNumberField(name, value); + return this; + } + + public XContentBuilder array(String name, BigInteger[] values) throws IOException { + return field(name).values(values); + } + + private XContentBuilder values(BigInteger[] values) throws IOException { + if (values == null) { + return nullValue(); + } + startArray(); + for (BigInteger b : values) { + value(b); + } + endArray(); + return this; + } + + public XContentBuilder value(BigInteger value) throws IOException { + if (value == null) { + return nullValue(); + } + generator.writeNumber(value); + return this; + } + + + //////////////////////////////////////////////////////////////////////////// + // BigDecimal + ////////////////////////////////// + + public XContentBuilder field(String name, BigDecimal value) throws IOException { + if (value == null) { + return nullField(name); + } + ensureNameNotNull(name); + generator.writeNumberField(name, value); + return this; + } + + public XContentBuilder array(String name, BigDecimal[] values) throws IOException { + return field(name).values(values); + } + + private XContentBuilder values(BigDecimal[] values) throws IOException { + if (values == null) { + return nullValue(); + } + startArray(); + for (BigDecimal b : values) { + value(b); + } + endArray(); + return this; + } + + public XContentBuilder value(BigDecimal value) throws IOException { + if (value == null) { + return nullValue(); + } + generator.writeNumber(value); + return this; + } + //////////////////////////////////////////////////////////////////////////// // String ////////////////////////////////// diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentGenerator.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentGenerator.java index 142c1e399c78c..48a82d9165511 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentGenerator.java +++ 
b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentGenerator.java @@ -23,6 +23,8 @@ import java.io.Flushable; import java.io.IOException; import java.io.InputStream; +import java.math.BigDecimal; +import java.math.BigInteger; public interface XContentGenerator extends Closeable, Flushable { @@ -70,6 +72,14 @@ public interface XContentGenerator extends Closeable, Flushable { void writeNumber(short value) throws IOException; + void writeNumber(BigInteger value) throws IOException; + + void writeNumberField(String name, BigInteger value) throws IOException; + + void writeNumber(BigDecimal value) throws IOException; + + void writeNumberField(String name, BigDecimal value) throws IOException; + void writeStringField(String name, String value) throws IOException; void writeString(String value) throws IOException; diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentGenerator.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentGenerator.java index 6f09174a573eb..97d25653ad687 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentGenerator.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentGenerator.java @@ -42,6 +42,8 @@ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.math.BigDecimal; +import java.math.BigInteger; import java.util.Objects; import java.util.Set; @@ -226,6 +228,19 @@ public void writeNumberField(String name, int value) throws IOException { generator.writeNumberField(name, value); } + @Override + public void writeNumberField(String name, BigInteger value) throws IOException { + // as jackson's JsonGenerator doesn't have this method for BigInteger + // we have to implement it ourselves + generator.writeFieldName(name); + generator.writeNumber(value); + } + + @Override + public void writeNumberField(String name, BigDecimal value) throws 
IOException { + generator.writeNumberField(name, value); + } + @Override public void writeNumber(int value) throws IOException { generator.writeNumber(value); @@ -246,6 +261,16 @@ public void writeNumber(short value) throws IOException { generator.writeNumber(value); } + @Override + public void writeNumber(BigInteger value) throws IOException { + generator.writeNumber(value); + } + + @Override + public void writeNumber(BigDecimal value) throws IOException { + generator.writeNumber(value); + } + @Override public void writeStringField(String name, String value) throws IOException { generator.writeStringField(name, value); diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml index a5f504647945f..1b5f9856391b0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml @@ -112,6 +112,16 @@ setup: - match: { hits.hits.0._source.bigint: 72057594037927936 } - is_false: hits.hits.0._source.include.field2 + +--- +"_source filtering on bigint": +- do: + search: + body: + _source: ["bigint"] + query: { match_all: {} } +- match: { hits.hits.0._source.bigint: 72057594037927936 } + --- "fields in body": - do: diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java b/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java index 3fb5f5996be79..38e75b921fa72 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java @@ -48,6 +48,7 @@ import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; +import java.math.BigDecimal; import java.math.BigInteger; import java.nio.file.Path; import java.time.DayOfWeek; @@ 
-266,6 +267,36 @@ public void testShorts() throws IOException { .endObject()); } + public void testBigIntegers() throws Exception { + assertResult("{'bigint':null}", () -> builder().startObject().field("bigint", (BigInteger) null).endObject()); + assertResult("{'bigint':[]}", () -> builder().startObject().array("bigint", new BigInteger[]{}).endObject()); + + BigInteger bigInteger = BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.ONE); + String result = "{'bigint':" + bigInteger.toString() + "}"; + assertResult(result, () -> builder().startObject().field("bigint", bigInteger).endObject()); + + result = "{'bigint':[" + bigInteger.toString() + "," + bigInteger.toString() + "," + bigInteger.toString() +"]}"; + assertResult(result, () -> builder() + .startObject() + .array("bigint", bigInteger, bigInteger, bigInteger) + .endObject()); + } + + public void testBigDecimals() throws Exception { + assertResult("{'bigdecimal':null}", () -> builder().startObject().field("bigdecimal", (BigInteger) null).endObject()); + assertResult("{'bigdecimal':[]}", () -> builder().startObject().array("bigdecimal", new BigInteger[]{}).endObject()); + + BigDecimal bigDecimal = new BigDecimal("234.43"); + String result = "{'bigdecimal':" + bigDecimal.toString() + "}"; + assertResult(result, () -> builder().startObject().field("bigdecimal", bigDecimal).endObject()); + + result = "{'bigdecimal':[" + bigDecimal.toString() + "," + bigDecimal.toString() + "," + bigDecimal.toString() +"]}"; + assertResult(result, () -> builder() + .startObject() + .array("bigdecimal", bigDecimal, bigDecimal, bigDecimal) + .endObject()); + } + public void testStrings() throws IOException { assertResult("{'string':null}", () -> builder().startObject().field("string", (String) null).endObject()); assertResult("{'string':'value'}", () -> builder().startObject().field("string", "value").endObject()); From 85e4ef3429ae2aa3b4ff07a7ceb9ba07bc921756 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Wed, 26 Sep 2018 13:22:48 
-0700 Subject: [PATCH 04/25] Scripting: Reflect factory signatures in painless classloader (#34088) It is sometimes desirable to pass a class into a script constructor that will not actually be exposed in the script whitelist. This commit uses reflection when creating the compiler to find all the classes of the factory method signature, and make the classloader that wraps lookup also expose these classes. --- .../org/elasticsearch/painless/Compiler.java | 60 ++++++++++++------- 1 file changed, 40 insertions(+), 20 deletions(-) diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java index 97dddbdfe52c5..e6ed475a7be9f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java @@ -28,11 +28,14 @@ import org.objectweb.asm.util.Printer; import java.lang.reflect.Constructor; +import java.lang.reflect.Method; import java.net.MalformedURLException; import java.net.URL; import java.security.CodeSource; import java.security.SecureClassLoader; import java.security.cert.Certificate; +import java.util.Collections; +import java.util.HashMap; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; @@ -89,16 +92,11 @@ final class Loader extends SecureClassLoader { */ @Override public Class findClass(String name) throws ClassNotFoundException { - if (scriptClass.getName().equals(name)) { - return scriptClass; + Class found = additionalClasses.get(name); + if (found != null) { + return found; } - if (factoryClass != null && factoryClass.getName().equals(name)) { - return factoryClass; - } - if (statefulFactoryClass != null && statefulFactoryClass.getName().equals(name)) { - return statefulFactoryClass; - } - Class found = painlessLookup.canonicalTypeNameToType(name.replace('$', '.')); + found = 
painlessLookup.canonicalTypeNameToType(name.replace('$', '.')); return found != null ? found : super.findClass(name); } @@ -156,19 +154,14 @@ public Loader createLoader(ClassLoader parent) { private final Class scriptClass; /** - * The class/interface to create the {@code scriptClass} instance. - */ - private final Class factoryClass; - - /** - * An optional class/interface to create the {@code factoryClass} instance. + * The whitelist the script will use. */ - private final Class statefulFactoryClass; + private final PainlessLookup painlessLookup; /** - * The whitelist the script will use. + * Classes that do not exist in the lookup, but are needed by the script factories. */ - private final PainlessLookup painlessLookup; + private final Map> additionalClasses; /** * Standard constructor. @@ -179,9 +172,36 @@ public Loader createLoader(ClassLoader parent) { */ Compiler(Class scriptClass, Class factoryClass, Class statefulFactoryClass, PainlessLookup painlessLookup) { this.scriptClass = scriptClass; - this.factoryClass = factoryClass; - this.statefulFactoryClass = statefulFactoryClass; this.painlessLookup = painlessLookup; + Map> additionalClasses = new HashMap<>(); + additionalClasses.put(scriptClass.getName(), scriptClass); + addFactoryMethod(additionalClasses, factoryClass, "newInstance"); + addFactoryMethod(additionalClasses, statefulFactoryClass, "newFactory"); + addFactoryMethod(additionalClasses, statefulFactoryClass, "newInstance"); + this.additionalClasses = Collections.unmodifiableMap(additionalClasses); + } + + private static void addFactoryMethod(Map> additionalClasses, Class factoryClass, String methodName) { + if (factoryClass == null) { + return; + } + + Method factoryMethod = null; + for (Method method : factoryClass.getMethods()) { + if (methodName.equals(method.getName())) { + factoryMethod = method; + break; + } + } + if (factoryMethod == null) { + return; + } + + additionalClasses.put(factoryClass.getName(), factoryClass); + for (int i = 0; i < 
factoryMethod.getParameterTypes().length; ++i) { + Class parameterClazz = factoryMethod.getParameterTypes()[i]; + additionalClasses.put(parameterClazz.getName(), parameterClazz); + } } /** From fcb60acc3481ce648e9c8fc810b00d8b3f5cb81b Mon Sep 17 00:00:00 2001 From: Jay Modi Date: Wed, 26 Sep 2018 14:27:35 -0600 Subject: [PATCH 05/25] Calculate changed roles on roles.yml reload (#33525) In order to optimize the use of the role cache, when the roles.yml file is reloaded we now calculate the names of removed, changed, and added roles so that they may be passed to any listeners. This allows a listener to selectively clear cache for only the roles that have been modified. The CompositeRolesStore has been adapted to do exactly that so that we limit the need to reload roles from sources such as the native roles stores or external role providers. See #33205 --- .../authz/store/CompositeRolesStore.java | 21 ++++++-- .../security/authz/store/FileRolesStore.java | 45 +++++++++++----- .../authz/store/CompositeRolesStoreTests.java | 3 +- .../authz/store/FileRolesStoreTests.java | 53 ++++++++++++++++++- 4 files changed, 102 insertions(+), 20 deletions(-) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java index beb2ca60fb2ae..7e1cc49e2c0bc 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java @@ -97,9 +97,7 @@ public CompositeRolesStore(Settings settings, FileRolesStore fileRolesStore, Nat ThreadContext threadContext, XPackLicenseState licenseState) { super(settings); this.fileRolesStore = fileRolesStore; - // invalidating all on a file based role update is heavy handed to say the least, but in general this should be infrequent 
so the - // impact isn't really worth the added complexity of only clearing the changed values - fileRolesStore.addListener(this::invalidateAll); + fileRolesStore.addListener(this::invalidate); this.nativeRolesStore = nativeRolesStore; this.reservedRolesStore = reservedRolesStore; this.privilegeStore = privilegeStore; @@ -356,6 +354,23 @@ public void invalidate(String role) { negativeLookupCache.remove(role); } + public void invalidate(Set roles) { + numInvalidation.incrementAndGet(); + + // the cache cannot be modified while doing this operation per the terms of the cache iterator + try (ReleasableLock ignored = writeLock.acquire()) { + Iterator> keyIter = roleCache.keys().iterator(); + while (keyIter.hasNext()) { + Set key = keyIter.next(); + if (Sets.haveEmptyIntersection(key, roles) == false) { + keyIter.remove(); + } + } + } + + negativeLookupCache.removeAll(roles); + } + public void usageStats(ActionListener> listener) { final Map usage = new HashMap<>(2); usage.put("file", fileRolesStore.usageStats()); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java index 59bc8042fbaff..868a7076b8b1f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParser; @@ -34,13 +35,16 @@ import java.nio.file.Files; import java.nio.file.Path; import 
java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.function.Consumer; import java.util.regex.Pattern; +import java.util.stream.Collectors; import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; @@ -52,16 +56,16 @@ public class FileRolesStore extends AbstractComponent { private final Path file; private final XPackLicenseState licenseState; - private final List listeners = new ArrayList<>(); + private final List>> listeners = new ArrayList<>(); private volatile Map permissions; public FileRolesStore(Settings settings, Environment env, ResourceWatcherService watcherService, XPackLicenseState licenseState) throws IOException { - this(settings, env, watcherService, () -> {}, licenseState); + this(settings, env, watcherService, null, licenseState); } - FileRolesStore(Settings settings, Environment env, ResourceWatcherService watcherService, Runnable listener, + FileRolesStore(Settings settings, Environment env, ResourceWatcherService watcherService, Consumer> listener, XPackLicenseState licenseState) throws IOException { super(settings); this.file = resolveFile(env); @@ -76,9 +80,10 @@ public FileRolesStore(Settings settings, Environment env, ResourceWatcherService } public Set roleDescriptors(Set roleNames) { + final Map localPermissions = permissions; Set descriptors = new HashSet<>(); roleNames.forEach((name) -> { - RoleDescriptor descriptor = permissions.get(name); + RoleDescriptor descriptor = localPermissions.get(name); if (descriptor != null) { descriptors.add(descriptor); } @@ -87,12 +92,13 @@ public Set roleDescriptors(Set roleNames) { } public Map usageStats() { + final Map localPermissions = permissions; Map usageStats = new HashMap<>(3); - usageStats.put("size", permissions.size()); + usageStats.put("size", localPermissions.size()); boolean dls = false; 
boolean fls = false; - for (RoleDescriptor descriptor : permissions.values()) { + for (RoleDescriptor descriptor : localPermissions.values()) { for (IndicesPrivileges indicesPrivileges : descriptor.getIndicesPrivileges()) { fls = fls || indicesPrivileges.getGrantedFields() != null || indicesPrivileges.getDeniedFields() != null; dls = dls || indicesPrivileges.getQuery() != null; @@ -107,10 +113,10 @@ public Map usageStats() { return usageStats; } - public void addListener(Runnable runnable) { - Objects.requireNonNull(runnable); + public void addListener(Consumer> consumer) { + Objects.requireNonNull(consumer); synchronized (this) { - listeners.add(runnable); + listeners.add(consumer); } } @@ -118,6 +124,11 @@ public Path getFile() { return file; } + // package private for testing + Set getAllRoleNames() { + return permissions.keySet(); + } + public static Path resolveFile(Environment env) { return XPackPlugin.resolveConfigFile(env, "roles.yml"); } @@ -319,11 +330,13 @@ public void onFileDeleted(Path file) { } @Override - public void onFileChanged(Path file) { + public synchronized void onFileChanged(Path file) { if (file.equals(FileRolesStore.this.file)) { + final Map previousPermissions = permissions; try { permissions = parseFile(file, logger, settings, licenseState); - logger.info("updated roles (roles file [{}] {})", file.toAbsolutePath(), Files.exists(file) ? "changed" : "removed"); + logger.info("updated roles (roles file [{}] {})", file.toAbsolutePath(), + Files.exists(file) ? 
"changed" : "removed"); } catch (Exception e) { logger.error( (Supplier) () -> new ParameterizedMessage( @@ -331,9 +344,13 @@ public void onFileChanged(Path file) { return; } - synchronized (FileRolesStore.this) { - listeners.forEach(Runnable::run); - } + final Set changedOrMissingRoles = Sets.difference(previousPermissions.entrySet(), permissions.entrySet()) + .stream() + .map(Map.Entry::getKey) + .collect(Collectors.toSet()); + final Set addedRoles = Sets.difference(permissions.keySet(), previousPermissions.keySet()); + final Set changedRoles = Collections.unmodifiableSet(Sets.union(changedOrMissingRoles, addedRoles)); + listeners.forEach(c -> c.accept(changedRoles)); } } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java index 0c2ab1ecc7650..9f1490856d67b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java @@ -53,6 +53,7 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.BiConsumer; +import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Predicate; @@ -213,7 +214,7 @@ public void testNegativeLookupsAreCached() { new CompositeRolesStore(SECURITY_ENABLED_SETTINGS, fileRolesStore, nativeRolesStore, reservedRolesStore, mock(NativePrivilegeStore.class), Collections.emptyList(), new ThreadContext(SECURITY_ENABLED_SETTINGS), new XPackLicenseState(SECURITY_ENABLED_SETTINGS)); - verify(fileRolesStore).addListener(any(Runnable.class)); // adds a listener in ctor + verify(fileRolesStore).addListener(any(Consumer.class)); // adds a listener in ctor final String roleName = 
randomAlphaOfLengthBetween(1, 10); PlainActionFuture future = new PlainActionFuture<>(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java index 5cb93b898ba52..0763ff65ec562 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java @@ -37,6 +37,7 @@ import java.nio.file.Path; import java.nio.file.StandardOpenOption; import java.util.Collections; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -319,8 +320,11 @@ public void testAutoReload() throws Exception { threadPool = new TestThreadPool("test"); watcherService = new ResourceWatcherService(settings, threadPool); final CountDownLatch latch = new CountDownLatch(1); - FileRolesStore store = new FileRolesStore(settings, env, watcherService, latch::countDown, - new XPackLicenseState(Settings.EMPTY)); + final Set modifiedRoles = new HashSet<>(); + FileRolesStore store = new FileRolesStore(settings, env, watcherService, roleSet -> { + modifiedRoles.addAll(roleSet); + latch.countDown(); + }, new XPackLicenseState(Settings.EMPTY)); Set descriptors = store.roleDescriptors(Collections.singleton("role1")); assertThat(descriptors, notNullValue()); @@ -344,6 +348,8 @@ public void testAutoReload() throws Exception { fail("Waited too long for the updated file to be picked up"); } + assertEquals(1, modifiedRoles.size()); + assertTrue(modifiedRoles.contains("role5")); final TransportRequest request = mock(TransportRequest.class); descriptors = store.roleDescriptors(Collections.singleton("role5")); assertThat(descriptors, notNullValue()); @@ -354,6 +360,49 @@ public void testAutoReload() throws Exception { 
assertThat(role.cluster().check("cluster:monitor/foo/bar", request), is(true)); assertThat(role.cluster().check("cluster:admin/foo/bar", request), is(false)); + // truncate to remove some + final Set truncatedFileRolesModified = new HashSet<>(); + final CountDownLatch truncateLatch = new CountDownLatch(1); + store = new FileRolesStore(settings, env, watcherService, roleSet -> { + truncatedFileRolesModified.addAll(roleSet); + truncateLatch.countDown(); + }, new XPackLicenseState(Settings.EMPTY)); + + final Set allRolesPreTruncate = store.getAllRoleNames(); + try (BufferedWriter writer = Files.newBufferedWriter(tmp, StandardCharsets.UTF_8, StandardOpenOption.TRUNCATE_EXISTING)) { + writer.append("role5:").append(System.lineSeparator()); + writer.append(" cluster:").append(System.lineSeparator()); + writer.append(" - 'MONITOR'"); + } + + truncateLatch.await(); + assertEquals(allRolesPreTruncate.size() - 1, truncatedFileRolesModified.size()); + assertTrue(allRolesPreTruncate.contains("role5")); + assertFalse(truncatedFileRolesModified.contains("role5")); + descriptors = store.roleDescriptors(Collections.singleton("role5")); + assertThat(descriptors, notNullValue()); + assertEquals(1, descriptors.size()); + + // modify + final Set modifiedFileRolesModified = new HashSet<>(); + final CountDownLatch modifyLatch = new CountDownLatch(1); + store = new FileRolesStore(settings, env, watcherService, roleSet -> { + modifiedFileRolesModified.addAll(roleSet); + modifyLatch.countDown(); + }, new XPackLicenseState(Settings.EMPTY)); + + try (BufferedWriter writer = Files.newBufferedWriter(tmp, StandardCharsets.UTF_8, StandardOpenOption.TRUNCATE_EXISTING)) { + writer.append("role5:").append(System.lineSeparator()); + writer.append(" cluster:").append(System.lineSeparator()); + writer.append(" - 'ALL'"); + } + + modifyLatch.await(); + assertEquals(1, modifiedFileRolesModified.size()); + assertTrue(modifiedFileRolesModified.contains("role5")); + descriptors = 
store.roleDescriptors(Collections.singleton("role5")); + assertThat(descriptors, notNullValue()); + assertEquals(1, descriptors.size()); } finally { if (watcherService != null) { watcherService.stop(); From a48b86e7c632b1c51e7f424b19d5c03780c09cc8 Mon Sep 17 00:00:00 2001 From: Jay Modi Date: Wed, 26 Sep 2018 14:42:22 -0600 Subject: [PATCH 06/25] Security: use default scroll keepalive (#33639) Security previously hardcoded a default scroll keepalive of 10 seconds, but in some cases this is not enough time as there can be network issues or overloading of host machines. After this change, security will now use the default keepalive timeout, which is controllable using a setting and the default value is 5 minutes. --- .../xpack/core/security/ScrollHelper.java | 11 ++++++++--- .../xpack/security/authc/TokenService.java | 3 ++- .../security/authc/esnative/NativeUsersStore.java | 4 ++-- .../authc/support/mapper/NativeRoleMappingStore.java | 4 ++-- .../security/authz/store/NativePrivilegeStore.java | 4 ++-- .../xpack/security/authz/store/NativeRolesStore.java | 3 ++- .../xpack/security/ScrollHelperIntegTests.java | 1 + .../xpack/security/audit/index/AuditTrailTests.java | 2 ++ 8 files changed, 21 insertions(+), 11 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java index a481f8803111a..97f8eb5fa11ba 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java @@ -5,6 +5,9 @@ */ package org.elasticsearch.xpack.core.security; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.ClearScrollRequest; import 
org.elasticsearch.action.search.SearchRequest; @@ -12,7 +15,6 @@ import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.client.Client; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.search.SearchHit; @@ -25,6 +27,7 @@ public final class ScrollHelper { + private static final Logger LOGGER = LogManager.getLogger(ScrollHelper.class); private ScrollHelper() {} /** @@ -35,13 +38,15 @@ public static void fetchAllByEntity(Client client, SearchRequest request, fi Function hitParser) { final List results = new ArrayList<>(); if (request.scroll() == null) { // we do scroll by default lets see if we can get rid of this at some point. - request.scroll(TimeValue.timeValueSeconds(10L)); + throw new IllegalArgumentException("request must have scroll set"); } final Consumer clearScroll = (response) -> { if (response != null && response.getScrollId() != null) { ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); clearScrollRequest.addScrollId(response.getScrollId()); - client.clearScroll(clearScrollRequest, ActionListener.wrap((r) -> {}, (e) -> {})); + client.clearScroll(clearScrollRequest, ActionListener.wrap((r) -> {}, e -> + LOGGER.warn(new ParameterizedMessage("clear scroll failed for scroll id [{}]", response.getScrollId()), e) + )); } }; // This function is MADNESS! But it works, don't think about it too hard... 
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java index 937bd22d98206..4f1ec4ad8c087 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java @@ -118,6 +118,7 @@ import static org.elasticsearch.action.support.TransportActions.isShardNotAvailableException; import static org.elasticsearch.gateway.GatewayService.STATE_NOT_RECOVERED_BLOCK; +import static org.elasticsearch.search.SearchService.DEFAULT_KEEPALIVE_SETTING; import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; @@ -846,7 +847,7 @@ public void findActiveTokensForRealm(String realmName, ActionListener> final Supplier supplier = client.threadPool().getThreadContext().newRestorableContext(false); try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN)) { SearchRequest request = client.prepareSearch(SECURITY_INDEX_NAME) - .setScroll(TimeValue.timeValueSeconds(10L)) + .setScroll(DEFAULT_KEEPALIVE_SETTING.get(settings)) .setQuery(query) .setSize(1000) .setFetchSource(true) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java index 677d13082ca90..b45de8184d6e3 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java @@ -16,7 +16,6 @@ import 
org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -56,6 +55,7 @@ import static org.elasticsearch.action.DocWriteResponse.Result.CREATED; import static org.elasticsearch.action.DocWriteResponse.Result.DELETED; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.search.SearchService.DEFAULT_KEEPALIVE_SETTING; import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin; @@ -129,7 +129,7 @@ void loadMappings(ActionListener> listener) { final Supplier supplier = client.threadPool().getThreadContext().newRestorableContext(false); try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN)) { SearchRequest request = client.prepareSearch(SECURITY_INDEX_NAME) - .setScroll(TimeValue.timeValueSeconds(10L)) + .setScroll(DEFAULT_KEEPALIVE_SETTING.get(settings)) .setTypes(SECURITY_GENERIC_TYPE) .setQuery(query) .setSize(1000) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java index 807cfff6c2c19..2cfa89b647ceb 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java @@ -24,7 +24,6 @@ import 
org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.iterable.Iterables; @@ -56,6 +55,7 @@ import java.util.stream.Collectors; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.search.SearchService.DEFAULT_KEEPALIVE_SETTING; import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin; @@ -115,7 +115,7 @@ public void getPrivileges(Collection applications, Collection na final Supplier supplier = client.threadPool().getThreadContext().newRestorableContext(false); try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN)) { SearchRequest request = client.prepareSearch(SECURITY_INDEX_NAME) - .setScroll(TimeValue.timeValueSeconds(10L)) + .setScroll(DEFAULT_KEEPALIVE_SETTING.get(settings)) .setQuery(query) .setSize(1000) .setFetchSource(true) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java index e578a4005c4ee..e032d524038a7 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java @@ -59,6 +59,7 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.existsQuery; +import 
static org.elasticsearch.search.SearchService.DEFAULT_KEEPALIVE_SETTING; import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin; @@ -120,7 +121,7 @@ public void getRoleDescriptors(String[] names, final ActionListener supplier = client.threadPool().getThreadContext().newRestorableContext(false); try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN)) { SearchRequest request = client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME) - .setScroll(TimeValue.timeValueSeconds(10L)) + .setScroll(DEFAULT_KEEPALIVE_SETTING.get(settings)) .setQuery(query) .setSize(1000) .setFetchSource(true) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java index 7ab26b0c33fef..3d623f343c3b2 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java @@ -79,6 +79,7 @@ public void testFetchAllByEntityWithBrokenScroll() { when(client.threadPool()).thenReturn(threadPool); when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); SearchRequest request = new SearchRequest(); + request.scroll(TimeValue.timeValueHours(10L)); String scrollId = randomAlphaOfLength(5); SearchHit[] hits = new SearchHit[] {new SearchHit(1), new SearchHit(2)}; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java index ef3c6aa56aeaf..022328f426fa3 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.client.Requests; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; import org.elasticsearch.test.SecurityIntegTestCase; @@ -161,6 +162,7 @@ private Collection> getAuditEvents() throws Exception { client.admin().indices().refresh(Requests.refreshRequest(indexName)).get(); final SearchRequest request = client.prepareSearch(indexName) + .setScroll(TimeValue.timeValueMinutes(10L)) .setTypes(IndexAuditTrail.DOC_TYPE) .setQuery(QueryBuilders.matchAllQuery()) .setSize(1000) From 2b730d1b9d7bfdfeee76d7cf8701a2a74ad0fc1c Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 26 Sep 2018 23:50:31 +0200 Subject: [PATCH 07/25] Mute MovAvgIT#testHoltWintersNotEnoughData Relates to #34098 --- .../search/aggregations/pipeline/moving/avg/MovAvgIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java index 49c700548f946..e9483f8be1fa1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java @@ -847,6 +847,7 @@ public void testNegativePrediction() { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/34098") public void testHoltWintersNotEnoughData() { Client client = client(); expectThrows(SearchPhaseExecutionException.class, () -> client.prepareSearch("idx").setTypes("type") From 
e4c3fd6d9624882abb97ec5b4d67141d80428635 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Wed, 26 Sep 2018 14:59:31 -0700 Subject: [PATCH 08/25] [DOCS] Moves graph to docs folder (#33472) --- docs/build.gradle | 1 + .../docs/en/rest-api => docs/reference}/graph/explore.asciidoc | 1 + docs/reference/rest-api/index.asciidoc | 2 +- x-pack/docs/build.gradle | 1 - 4 files changed, 3 insertions(+), 2 deletions(-) rename {x-pack/docs/en/rest-api => docs/reference}/graph/explore.asciidoc (99%) diff --git a/docs/build.gradle b/docs/build.gradle index 864567ba8358a..935149bdc84a5 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -100,6 +100,7 @@ buildRestTests.docs = fileTree(projectDir) { exclude 'reference/rollup/apis/delete-job.asciidoc' exclude 'reference/rollup/apis/get-job.asciidoc' exclude 'reference/rollup/apis/rollup-caps.asciidoc' + exclude 'reference/graph/explore.asciidoc' } listSnippets.docs = buildRestTests.docs diff --git a/x-pack/docs/en/rest-api/graph/explore.asciidoc b/docs/reference/graph/explore.asciidoc similarity index 99% rename from x-pack/docs/en/rest-api/graph/explore.asciidoc rename to docs/reference/graph/explore.asciidoc index f9902fcbe48a8..91b07a631292b 100644 --- a/x-pack/docs/en/rest-api/graph/explore.asciidoc +++ b/docs/reference/graph/explore.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="platinum"] [[graph-explore-api]] == Explore API diff --git a/docs/reference/rest-api/index.asciidoc b/docs/reference/rest-api/index.asciidoc index b80e8badf5bb3..c6243ab25981b 100644 --- a/docs/reference/rest-api/index.asciidoc +++ b/docs/reference/rest-api/index.asciidoc @@ -19,7 +19,7 @@ directly to configure and access {xpack} features. 
include::info.asciidoc[] -include::{xes-repo-dir}/rest-api/graph/explore.asciidoc[] +include::{es-repo-dir}/graph/explore.asciidoc[] include::{es-repo-dir}/licensing/index.asciidoc[] include::{es-repo-dir}/migration/migration.asciidoc[] include::{es-repo-dir}/ml/apis/ml-api.asciidoc[] diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle index f027493b0abed..59d89024db5be 100644 --- a/x-pack/docs/build.gradle +++ b/x-pack/docs/build.gradle @@ -92,7 +92,6 @@ buildRestTests.docs = fileTree(projectDir) { exclude 'build' // These file simply doesn't pass yet. We should figure out how to fix them. exclude 'en/watcher/reference/actions.asciidoc' - exclude 'en/rest-api/graph/explore.asciidoc' } Map setups = buildRestTests.setups From 1d08f63effcb387473e2adedae2af79427a47cc0 Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Wed, 26 Sep 2018 15:08:35 -0700 Subject: [PATCH 09/25] When creating wildcard queries, use MatchNoDocsQuery when the field type doesn't exist. (#34093) --- .../ICUCollationKeywordFieldMapper.java | 5 ++- .../index/mapper/CollationFieldTypeTests.java | 2 +- .../index/mapper/IndexFieldMapper.java | 5 ++- .../index/mapper/MappedFieldType.java | 4 ++- .../index/mapper/StringFieldType.java | 16 ++++++---- .../index/query/WildcardQueryBuilder.java | 19 +++--------- .../query/WildcardQueryBuilderTests.java | 31 ++++++++----------- 7 files changed, 40 insertions(+), 42 deletions(-) diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java index c44152dd5013c..2c20d4b47844e 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java @@ -33,6 +33,7 @@ import org.apache.lucene.search.Query; import 
org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; @@ -167,7 +168,9 @@ public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, Quer } @Override - public Query wildcardQuery(String value, QueryShardContext context) { + public Query wildcardQuery(String value, + @Nullable MultiTermQuery.RewriteMethod method, + QueryShardContext context) { throw new UnsupportedOperationException("[wildcard] queries are not supported on [" + CONTENT_TYPE + "] fields."); } diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/CollationFieldTypeTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/CollationFieldTypeTests.java index f90971412358d..a261e8b3b7e9a 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/CollationFieldTypeTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/CollationFieldTypeTests.java @@ -126,7 +126,7 @@ public void testWildcardQuery() { ft.setName("field"); ft.setIndexOptions(IndexOptions.DOCS); expectThrows(UnsupportedOperationException.class, - () -> ft.wildcardQuery("foo*", null)); + () -> ft.wildcardQuery("foo*", null, null)); } public void testRangeQuery() { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java index fac00907980fd..456805e64160e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java @@ -22,6 +22,7 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.MatchAllDocsQuery; +import 
org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Nullable; @@ -151,7 +152,9 @@ public Query termsQuery(List values, QueryShardContext context) { } @Override - public Query wildcardQuery(String value, QueryShardContext context) { + public Query wildcardQuery(String value, + @Nullable MultiTermQuery.RewriteMethod method, + QueryShardContext context) { if (isSameIndex(value, context.getFullyQualifiedIndex().getName())) { return Queries.newMatchAllQuery(); } else { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index 5a2c8991e0aaa..45bb5ed395dc0 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -345,7 +345,9 @@ public Query prefixQuery(String value, @Nullable MultiTermQuery.RewriteMethod me throw new QueryShardException(context, "Can only use prefix queries on keyword and text fields - not on [" + name + "] which is of type [" + typeName() + "]"); } - public Query wildcardQuery(String value, QueryShardContext context) { + public Query wildcardQuery(String value, + @Nullable MultiTermQuery.RewriteMethod method, + QueryShardContext context) { throw new QueryShardException(context, "Can only use wildcard queries on keyword and text fields - not on [" + name + "] which is of type [" + typeName() + "]"); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java index 857c588717b2a..cde8e392dabb8 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java @@ -19,23 +19,24 @@ package org.elasticsearch.index.mapper; -import java.util.List; - 
import org.apache.lucene.index.Term; +import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; -import org.apache.lucene.search.TermInSetQuery; -import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.RegexpQuery; +import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.query.support.QueryParsers; + +import java.util.List; /** Base class for {@link MappedFieldType} implementations that use the same * representation for internal index terms as the external representation so @@ -78,13 +79,16 @@ public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, Quer } @Override - public Query wildcardQuery(String value, QueryShardContext context) { + public Query wildcardQuery(String value, MultiTermQuery.RewriteMethod method, QueryShardContext context) { Query termQuery = termQuery(value, context); if (termQuery instanceof MatchNoDocsQuery || termQuery instanceof MatchAllDocsQuery) { return termQuery; } Term term = MappedFieldType.extractTerm(termQuery); - return new WildcardQuery(term); + + WildcardQuery query = new WildcardQuery(term); + QueryParsers.setRewriteMethod(query, method); + return query; } @Override diff --git a/server/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java index 489286d309472..697e7ac864e85 100644 --- a/server/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java +++ 
b/server/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java @@ -19,16 +19,14 @@ package org.elasticsearch.index.query; -import org.apache.lucene.index.Term; +import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.WildcardQuery; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -184,20 +182,13 @@ public static WildcardQueryBuilder fromXContent(XContentParser parser) throws IO protected Query doToQuery(QueryShardContext context) throws IOException { MappedFieldType fieldType = context.fieldMapper(fieldName); - Query query; if (fieldType == null) { - Term term = new Term(fieldName, BytesRefs.toBytesRef(value)); - query = new WildcardQuery(term); - } else { - query = fieldType.wildcardQuery(value, context); + return new MatchNoDocsQuery("unknown field [" + fieldName + "]"); } - if (query instanceof MultiTermQuery) { - MultiTermQuery.RewriteMethod rewriteMethod = QueryParsers.parseRewriteMethod( - rewrite, null, LoggingDeprecationHandler.INSTANCE); - QueryParsers.setRewriteMethod((MultiTermQuery) query, rewriteMethod); - } - return query; + MultiTermQuery.RewriteMethod method = QueryParsers.parseRewriteMethod( + rewrite, null, LoggingDeprecationHandler.INSTANCE); + return fieldType.wildcardQuery(value, method, context); } @Override diff --git a/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java index 
73aa1667f30f4..f5adb70c9fecc 100644 --- a/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.query; -import org.apache.lucene.index.Term; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; @@ -70,13 +69,19 @@ private static WildcardQueryBuilder randomWildcardQuery() { @Override protected void doAssertLuceneQuery(WildcardQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException { - assertThat(query, instanceOf(WildcardQuery.class)); - WildcardQuery wildcardQuery = (WildcardQuery) query; - String expectedFieldName = expectedFieldName(queryBuilder.fieldName()); - assertThat(wildcardQuery.getField(), equalTo(expectedFieldName)); - assertThat(wildcardQuery.getTerm().field(), equalTo(expectedFieldName)); - assertThat(wildcardQuery.getTerm().text(), equalTo(queryBuilder.value())); + + if (expectedFieldName.equals(STRING_FIELD_NAME)) { + assertThat(query, instanceOf(WildcardQuery.class)); + WildcardQuery wildcardQuery = (WildcardQuery) query; + + assertThat(wildcardQuery.getField(), equalTo(expectedFieldName)); + assertThat(wildcardQuery.getTerm().field(), equalTo(expectedFieldName)); + assertThat(wildcardQuery.getTerm().text(), equalTo(queryBuilder.value())); + } else { + Query expected = new MatchNoDocsQuery("unknown field [" + expectedFieldName + "]"); + assertEquals(expected, query); + } } public void testIllegalArguments() { @@ -92,7 +97,7 @@ public void testIllegalArguments() { public void testEmptyValue() throws IOException { QueryShardContext context = createShardContext(); context.setAllowUnmappedFields(true); - WildcardQueryBuilder wildcardQueryBuilder = new WildcardQueryBuilder("doc", ""); + WildcardQueryBuilder wildcardQueryBuilder = new WildcardQueryBuilder(STRING_FIELD_NAME, ""); 
assertEquals(wildcardQueryBuilder.toQuery(context).getClass(), WildcardQuery.class); } @@ -130,16 +135,6 @@ public void testParseFailsWithMultipleFields() throws IOException { assertEquals("[wildcard] query doesn't support multiple fields, found [user1] and [user2]", e.getMessage()); } - public void testWithMetaDataField() throws IOException { - QueryShardContext context = createShardContext(); - for (String field : new String[]{"field1", "field2"}) { - WildcardQueryBuilder wildcardQueryBuilder = new WildcardQueryBuilder(field, "toto"); - Query query = wildcardQueryBuilder.toQuery(context); - Query expected = new WildcardQuery(new Term(field, "toto")); - assertEquals(expected, query); - } - } - public void testIndexWildcard() throws IOException { QueryShardContext context = createShardContext(); String index = context.getFullyQualifiedIndex().getName(); From ae8e54493dfb5e14feca6d8fe0ffba7bc045aff8 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Thu, 27 Sep 2018 00:16:17 +0200 Subject: [PATCH 10/25] Build DocStats from SegmentInfos in ReadOnlyEngine (#34079) This change is related to #33903 that ports the DocStats simplification to the master branch. This change builds the docStats in the ReadOnlyEngine from the last committed segment infos rather than the reader. 
Co-authored-by: Tanguy Leroux --- .../index/engine/ReadOnlyEngine.java | 27 +++++++++++++++++-- 1 file changed, 25 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java b/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java index 7848921b67e2e..26ef259a1e1c6 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java @@ -21,6 +21,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.SegmentCommitInfo; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.SoftDeletesDirectoryReaderWrapper; import org.apache.lucene.search.IndexSearcher; @@ -95,7 +96,7 @@ public ReadOnlyEngine(EngineConfig config, SeqNoStats seqNoStats, TranslogStats this.lastCommittedSegmentInfos = Lucene.readSegmentInfos(directory); this.translogStats = translogStats == null ? new TranslogStats(0, 0, 0, 0, 0) : translogStats; this.seqNoStats = seqNoStats == null ? 
buildSeqNoStats(lastCommittedSegmentInfos) : seqNoStats; - reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(directory), config.getShardId()); + reader = ElasticsearchDirectoryReader.wrap(open(directory), config.getShardId()); if (config.getIndexSettings().isSoftDeleteEnabled()) { reader = new SoftDeletesDirectoryReaderWrapper(reader, Lucene.SOFT_DELETES_FIELD); } @@ -103,7 +104,7 @@ public ReadOnlyEngine(EngineConfig config, SeqNoStats seqNoStats, TranslogStats this.indexCommit = reader.getIndexCommit(); this.searcherManager = new SearcherManager(reader, new RamAccountingSearcherFactory(engineConfig.getCircuitBreakerService())); - this.docsStats = docsStats(reader); + this.docsStats = docsStats(lastCommittedSegmentInfos); this.indexWriterLock = indexWriterLock; success = true; } finally { @@ -116,6 +117,28 @@ public ReadOnlyEngine(EngineConfig config, SeqNoStats seqNoStats, TranslogStats } } + protected DirectoryReader open(final Directory directory) throws IOException { + return DirectoryReader.open(directory); + } + + private DocsStats docsStats(final SegmentInfos lastCommittedSegmentInfos) { + long numDocs = 0; + long numDeletedDocs = 0; + long sizeInBytes = 0; + if (lastCommittedSegmentInfos != null) { + for (SegmentCommitInfo segmentCommitInfo : lastCommittedSegmentInfos) { + numDocs += segmentCommitInfo.info.maxDoc() - segmentCommitInfo.getDelCount() - segmentCommitInfo.getSoftDelCount(); + numDeletedDocs += segmentCommitInfo.getDelCount() + segmentCommitInfo.getSoftDelCount(); + try { + sizeInBytes += segmentCommitInfo.sizeInBytes(); + } catch (IOException e) { + throw new UncheckedIOException("Failed to get size for [" + segmentCommitInfo.info.name + "]", e); + } + } + } + return new DocsStats(numDocs, numDeletedDocs, sizeInBytes); + } + @Override protected void closeNoLock(String reason, CountDownLatch closedLatch) { if (isClosed.compareAndSet(false, true)) { From 12d94e44b8306f315f9e936bc46be55fd867122b Mon Sep 17 00:00:00 2001 From: Nhat 
Nguyen Date: Wed, 26 Sep 2018 18:39:10 -0400 Subject: [PATCH 11/25] Adjust bwc version for max_seq_no_of_updates Relates #33967 Relates #33842 --- .../support/replication/TransportReplicationAction.java | 4 ++-- .../java/org/elasticsearch/index/engine/InternalEngine.java | 2 +- .../main/java/org/elasticsearch/index/shard/IndexShard.java | 4 ++-- .../indices/recovery/RecoveryTranslogOperationsRequest.java | 4 ++-- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index dff635f6e7489..695c9162633f6 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -1295,7 +1295,7 @@ public void readFrom(StreamInput in) throws IOException { } else { globalCheckpoint = SequenceNumbers.UNASSIGNED_SEQ_NO; } - if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + if (in.getVersion().onOrAfter(Version.V_6_5_0)) { maxSeqNoOfUpdatesOrDeletes = in.readZLong(); } else { // UNASSIGNED_SEQ_NO (-2) means uninitialized, and replicas will disable @@ -1310,7 +1310,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getVersion().onOrAfter(Version.V_6_0_0_alpha1)) { out.writeZLong(globalCheckpoint); } - if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + if (out.getVersion().onOrAfter(Version.V_6_5_0)) { out.writeZLong(maxSeqNoOfUpdatesOrDeletes); } } diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 5bd1a473fb97b..5f770e0d93baa 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ 
b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -2561,7 +2561,7 @@ private boolean assertMaxSeqNoOfUpdatesIsAdvanced(Term id, long seqNo, boolean a final long maxSeqNoOfUpdates = getMaxSeqNoOfUpdatesOrDeletes(); // If the primary is on an old version which does not replicate msu, we need to relax this assertion for that. if (maxSeqNoOfUpdates == SequenceNumbers.UNASSIGNED_SEQ_NO) { - assert config().getIndexSettings().getIndexVersionCreated().before(Version.V_7_0_0_alpha1); + assert config().getIndexSettings().getIndexVersionCreated().before(Version.V_6_5_0); return true; } // We treat a delete on the tombstones on replicas as a regular document, then use updateDocument (not addDocument). diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 28ff3c9309644..11d8f44bef133 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -504,7 +504,7 @@ public void updateShardState(final ShardRouting newRouting, if (getMaxSeqNoOfUpdatesOrDeletes() == SequenceNumbers.UNASSIGNED_SEQ_NO) { // If the old primary was on an old version that did not replicate the msu, // we need to bootstrap it manually from its local history. - assert indexSettings.getIndexVersionCreated().before(Version.V_7_0_0_alpha1); + assert indexSettings.getIndexVersionCreated().before(Version.V_6_5_0); engine.advanceMaxSeqNoOfUpdatesOrDeletes(seqNoStats().getMaxSeqNo()); } engine.restoreLocalHistoryFromTranslog((resettingEngine, snapshot) -> @@ -1957,7 +1957,7 @@ public void activateWithPrimaryContext(final ReplicationTracker.PrimaryContext p if (getMaxSeqNoOfUpdatesOrDeletes() == SequenceNumbers.UNASSIGNED_SEQ_NO) { // If the old primary was on an old version that did not replicate the msu, // we need to bootstrap it manually from its local history. 
- assert indexSettings.getIndexVersionCreated().before(Version.V_7_0_0_alpha1); + assert indexSettings.getIndexVersionCreated().before(Version.V_6_5_0); getEngine().advanceMaxSeqNoOfUpdatesOrDeletes(seqNoStats().getMaxSeqNo()); } } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java index 58b5fb927b5d4..0ae5d507eb357 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java @@ -89,7 +89,7 @@ public void readFrom(StreamInput in) throws IOException { } else { maxSeenAutoIdTimestampOnPrimary = IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP; } - if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + if (in.getVersion().onOrAfter(Version.V_6_5_0)) { maxSeqNoOfUpdatesOrDeletesOnPrimary = in.readZLong(); } else { // UNASSIGNED_SEQ_NO means uninitialized and replica won't enable optimization using seq_no @@ -107,7 +107,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getVersion().onOrAfter(Version.V_6_5_0)) { out.writeZLong(maxSeenAutoIdTimestampOnPrimary); } - if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + if (out.getVersion().onOrAfter(Version.V_6_5_0)) { out.writeZLong(maxSeqNoOfUpdatesOrDeletesOnPrimary); } } From ea9b33527e6d3d1086a401af668249cd679d2dca Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Wed, 26 Sep 2018 22:38:25 -0400 Subject: [PATCH 12/25] TEST: Add engine is closed as expected failure msg This commit adds "engine is closed" as an expected failure message. This change is due to #33967 in which we might access a closed engine on promotion. 
Relates #33967 --- .../index/replication/IndexLevelReplicationTests.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java index 22576f8818975..5fca37a71886d 100644 --- a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java @@ -381,7 +381,8 @@ public void testReplicaOperationWithConcurrentPrimaryPromotion() throws Exceptio indexOnReplica(replicationRequest, shards, replica, primaryPrimaryTerm); successFullyIndexed.set(true); } catch (IllegalStateException ise) { - assertThat(ise.getMessage(), either(containsString("is too old")).or(containsString("cannot be a replication target"))); + assertThat(ise.getMessage(), either(containsString("is too old")) + .or(containsString("cannot be a replication target")).or(containsString("engine is closed"))); } catch (Exception e) { throw new RuntimeException(e); } From acd80a1e0722536fa18220d640ac423155823954 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Thu, 27 Sep 2018 06:02:24 +0200 Subject: [PATCH 13/25] TESTS: Enable DEBUG Logging in Flaky Test (#34091) * This should surface what errors are thrown on CI and in org.elasticsearch.transport.RemoteClusterConnection.ConnectHandler#collectRemoteNodes (the sequence of caught error in the last catch block and moving on to the next seed node seems to be the only path by which the errors logged in #33756 could come about) * Relates #33756 --- .../elasticsearch/transport/RemoteClusterConnectionTests.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java index f5d23c4f3f8a2..e77180508b619 
100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java @@ -53,6 +53,7 @@ import org.elasticsearch.mocksocket.MockServerSocket; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; +import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.test.transport.StubbableTransport; import org.elasticsearch.threadpool.TestThreadPool; @@ -834,6 +835,7 @@ public void run() { } } + @TestLogging("_root:DEBUG, org.elasticsearch.transport:TRACE") public void testCloseWhileConcurrentlyConnecting() throws IOException, InterruptedException, BrokenBarrierException { List knownNodes = new CopyOnWriteArrayList<>(); try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT); From 0301062c6e177deee66924117ac094a3e49fe77d Mon Sep 17 00:00:00 2001 From: Yogesh Gaikwad Date: Thu, 27 Sep 2018 15:15:39 +1000 Subject: [PATCH 14/25] Mute SpanMultiTermQueryBuilderTests#testToQuery --- .../main/java/org/elasticsearch/test/AbstractQueryTestCase.java | 1 + 1 file changed, 1 insertion(+) diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java index f71edfde84f0f..8b1867cbcbb29 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java @@ -406,6 +406,7 @@ protected boolean builderGeneratesCacheableQueries() { * Test creates the {@link Query} from the {@link QueryBuilder} under test and delegates the * assertions being made on the result to the implementing subclass. 
*/ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/34105") public void testToQuery() throws IOException { for (int runs = 0; runs < NUMBER_OF_TESTQUERIES; runs++) { QueryShardContext context = createShardContext(); From bda7bc145b467e6b4300ca696310532ccfe05fad Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Thu, 27 Sep 2018 09:06:04 +0200 Subject: [PATCH 15/25] Fold EngineSearcher into Engine.Searcher (#34082) EngineSearcher can be easily folded into Engine.Searcher which removes a level of inheritance that is necessary for most of it's subclasses. This change folds it into Engine.Searcher and removes the dependency on ReferenceManager. --- .../elasticsearch/index/engine/Engine.java | 52 +++++++++++--- .../index/engine/EngineSearcher.java | 68 ------------------- .../index/engine/InternalEngine.java | 4 +- .../index/shard/IndexSearcherWrapper.java | 21 ++---- .../shard/IndexSearcherWrapperTests.java | 6 +- .../search/DefaultSearchContextTests.java | 2 +- .../profile/query/QueryProfilerTests.java | 4 +- .../aggregations/AggregatorTestCase.java | 2 +- .../test/engine/AssertingSearcher.java | 11 +-- 9 files changed, 60 insertions(+), 110 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/index/engine/EngineSearcher.java diff --git a/server/src/main/java/org/elasticsearch/index/engine/Engine.java b/server/src/main/java/org/elasticsearch/index/engine/Engine.java index d70603e0370fe..c45517e956728 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -43,6 +43,7 @@ import org.apache.lucene.store.IOContext; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountables; +import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.SetOnce; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.index.IndexRequest; @@ -663,7 +664,15 @@ public Searcher acquireSearcher(String source, 
SearcherScope scope) throws Engin } Releasable releasable = store::decRef; try { - EngineSearcher engineSearcher = new EngineSearcher(source, getReferenceManager(scope), store, logger); + ReferenceManager referenceManager = getReferenceManager(scope); + Searcher engineSearcher = new Searcher(source, referenceManager.acquire(), + s -> { + try { + referenceManager.release(s); + } finally { + store.decRef(); + } + }, logger); releasable = null; // success - hand over the reference to the engine searcher return engineSearcher; } catch (AlreadyClosedException ex) { @@ -1167,40 +1176,67 @@ default void onFailedEngine(String reason, @Nullable Exception e) { } public static class Searcher implements Releasable { - private final String source; private final IndexSearcher searcher; + private final AtomicBoolean released = new AtomicBoolean(false); + private final Logger logger; + private final IOUtils.IOConsumer onClose; + + public Searcher(String source, IndexSearcher searcher, Logger logger) { + this(source, searcher, s -> s.getIndexReader().close(), logger); + } - public Searcher(String source, IndexSearcher searcher) { + public Searcher(String source, IndexSearcher searcher, IOUtils.IOConsumer onClose, Logger logger) { this.source = source; this.searcher = searcher; + this.onClose = onClose; + this.logger = logger; } /** * The source that caused this searcher to be acquired. 
*/ - public String source() { + public final String source() { return source; } - public IndexReader reader() { + public final IndexReader reader() { return searcher.getIndexReader(); } - public DirectoryReader getDirectoryReader() { + public final DirectoryReader getDirectoryReader() { if (reader() instanceof DirectoryReader) { return (DirectoryReader) reader(); } throw new IllegalStateException("Can't use " + reader().getClass() + " as a directory reader"); } - public IndexSearcher searcher() { + public final IndexSearcher searcher() { return searcher; } @Override public void close() { - // Nothing to close here + if (released.compareAndSet(false, true) == false) { + /* In general, searchers should never be released twice or this would break reference counting. There is one rare case + * when it might happen though: when the request and the Reaper thread would both try to release it in a very short amount + * of time, this is why we only log a warning instead of throwing an exception. + */ + logger.warn("Searcher was released twice", new IllegalStateException("Double release")); + return; + } + try { + onClose.accept(searcher()); + } catch (IOException e) { + throw new IllegalStateException("Cannot close", e); + } catch (AlreadyClosedException e) { + // This means there's a bug somewhere: don't suppress it + throw new AssertionError(e); + } + } + + public final Logger getLogger() { + return logger; } } diff --git a/server/src/main/java/org/elasticsearch/index/engine/EngineSearcher.java b/server/src/main/java/org/elasticsearch/index/engine/EngineSearcher.java deleted file mode 100644 index 7fd0fe6cc3904..0000000000000 --- a/server/src/main/java/org/elasticsearch/index/engine/EngineSearcher.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.engine; - -import org.apache.logging.log4j.Logger; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.ReferenceManager; -import org.apache.lucene.store.AlreadyClosedException; -import org.elasticsearch.index.store.Store; - -import java.io.IOException; -import java.util.concurrent.atomic.AtomicBoolean; - -/** - * Searcher for an Engine - */ -final class EngineSearcher extends Engine.Searcher { - private final AtomicBoolean released = new AtomicBoolean(false); - private final Store store; - private final Logger logger; - private final ReferenceManager referenceManager; - - EngineSearcher(String source, ReferenceManager searcherReferenceManager, Store store, Logger logger) throws IOException { - super(source, searcherReferenceManager.acquire()); - this.store = store; - this.logger = logger; - this.referenceManager = searcherReferenceManager; - } - - @Override - public void close() { - if (!released.compareAndSet(false, true)) { - /* In general, searchers should never be released twice or this would break reference counting. There is one rare case - * when it might happen though: when the request and the Reaper thread would both try to release it in a very short amount - * of time, this is why we only log a warning instead of throwing an exception. 
- */ - logger.warn("Searcher was released twice", new IllegalStateException("Double release")); - return; - } - try { - referenceManager.release(searcher()); - } catch (IOException e) { - throw new IllegalStateException("Cannot close", e); - } catch (AlreadyClosedException e) { - // This means there's a bug somewhere: don't suppress it - throw new AssertionError(e); - } finally { - store.decRef(); - } - } -} diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 5f770e0d93baa..85e7cebcb88fc 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -606,7 +606,7 @@ public GetResult get(Get get, BiFunction search // in the case of a already pruned translog generation we might get null here - yet very unlikely TranslogLeafReader reader = new TranslogLeafReader((Translog.Index) operation, engineConfig .getIndexSettings().getIndexVersionCreated()); - return new GetResult(new Searcher("realtime_get", new IndexSearcher(reader)), + return new GetResult(new Searcher("realtime_get", new IndexSearcher(reader), logger), new VersionsAndSeqNoResolver.DocIdAndVersion(0, ((Translog.Index) operation).version(), reader, 0)); } } catch (IOException e) { @@ -2085,7 +2085,7 @@ public IndexSearcher newSearcher(IndexReader reader, IndexReader previousReader) if (warmer != null) { try { assert searcher.getIndexReader() instanceof ElasticsearchDirectoryReader : "this class needs an ElasticsearchDirectoryReader but got: " + searcher.getIndexReader().getClass(); - warmer.warm(new Searcher("top_reader_warming", searcher)); + warmer.warm(new Searcher("top_reader_warming", searcher, s -> {}, logger)); } catch (Exception e) { if (isEngineClosed.get() == false) { logger.warn("failed to prepare/warm", e); diff --git 
a/server/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java b/server/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java index a6949c0559722..bc62f4067b9d0 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java @@ -24,8 +24,8 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReader; import org.apache.lucene.search.IndexSearcher; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.engine.Engine; import java.io.IOException; @@ -97,21 +97,10 @@ public final Engine.Searcher wrap(Engine.Searcher engineSearcher) throws IOExcep if (reader == nonClosingReaderWrapper && indexSearcher == innerIndexSearcher) { return engineSearcher; } else { - return new Engine.Searcher(engineSearcher.source(), indexSearcher) { - @Override - public void close() throws ElasticsearchException { - try { - reader().close(); - // we close the reader to make sure wrappers can release resources if needed.... - // our NonClosingReaderWrapper makes sure that our reader is not closed - } catch (IOException e) { - throw new ElasticsearchException("failed to close reader", e); - } finally { - engineSearcher.close(); - } - - } - }; + // we close the reader to make sure wrappers can release resources if needed.... 
+ // our NonClosingReaderWrapper makes sure that our reader is not closed + return new Engine.Searcher(engineSearcher.source(), indexSearcher, s -> IOUtils.close(s.getIndexReader(), engineSearcher), + engineSearcher.getLogger()); } } diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java index e9f52d7c3198d..a0bf75ddb1340 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java @@ -73,7 +73,7 @@ public IndexSearcher wrap(IndexSearcher searcher) throws EngineException { final int sourceRefCount = open.getRefCount(); final AtomicInteger count = new AtomicInteger(); final AtomicInteger outerCount = new AtomicInteger(); - try (Engine.Searcher engineSearcher = new Engine.Searcher("foo", searcher)) { + try (Engine.Searcher engineSearcher = new Engine.Searcher("foo", searcher, s -> {}, logger)) { final Engine.Searcher wrap = wrapper.wrap(engineSearcher); assertEquals(1, wrap.reader().getRefCount()); ElasticsearchDirectoryReader.addReaderCloseListener(wrap.getDirectoryReader(), key -> { @@ -121,7 +121,7 @@ public IndexSearcher wrap(IndexSearcher searcher) throws EngineException { } }; final ConcurrentHashMap cache = new ConcurrentHashMap<>(); - try (Engine.Searcher engineSearcher = new Engine.Searcher("foo", searcher)) { + try (Engine.Searcher engineSearcher = new Engine.Searcher("foo", searcher, s -> {}, logger)) { try (Engine.Searcher wrap = wrapper.wrap(engineSearcher)) { ElasticsearchDirectoryReader.addReaderCloseListener(wrap.getDirectoryReader(), key -> { cache.remove(key); @@ -151,7 +151,7 @@ public void testNoWrap() throws IOException { assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value); searcher.setSimilarity(iwc.getSimilarity()); IndexSearcherWrapper wrapper = new 
IndexSearcherWrapper(); - try (Engine.Searcher engineSearcher = new Engine.Searcher("foo", searcher)) { + try (Engine.Searcher engineSearcher = new Engine.Searcher("foo", searcher, logger)) { final Engine.Searcher wrap = wrapper.wrap(engineSearcher); assertSame(wrap, engineSearcher); } diff --git a/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java b/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java index f59cc85c09ccf..22e16342b446c 100644 --- a/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java +++ b/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java @@ -110,7 +110,7 @@ public void testPreProcess() throws Exception { try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir); IndexReader reader = w.getReader(); - Engine.Searcher searcher = new Engine.Searcher("test", new IndexSearcher(reader))) { + Engine.Searcher searcher = new Engine.Searcher("test", new IndexSearcher(reader), logger)) { DefaultSearchContext context1 = new DefaultSearchContext(1L, shardSearchRequest, null, searcher, null, indexService, indexShard, bigArrays, null, timeout, null, null, Version.CURRENT); diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java index ba58a79953be1..b7a9c8cb69a31 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java @@ -82,7 +82,7 @@ public static void setup() throws IOException { } reader = w.getReader(); w.close(); - Engine.Searcher engineSearcher = new Engine.Searcher("test", new IndexSearcher(reader)); + Engine.Searcher engineSearcher = new Engine.Searcher("test", new IndexSearcher(reader), null); searcher = new ContextIndexSearcher(engineSearcher, 
IndexSearcher.getDefaultQueryCache(), MAYBE_CACHE_POLICY); } @@ -363,7 +363,7 @@ public void testUseIndexStats() throws IOException { public void testApproximations() throws IOException { QueryProfiler profiler = new QueryProfiler(); - Engine.Searcher engineSearcher = new Engine.Searcher("test", new IndexSearcher(reader)); + Engine.Searcher engineSearcher = new Engine.Searcher("test", new IndexSearcher(reader), logger); // disable query caching since we want to test approximations, which won't // be exposed on a cached entry ContextIndexSearcher searcher = new ContextIndexSearcher(engineSearcher, null, MAYBE_CACHE_POLICY); diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index 17202839a65fd..6f9c46b4dc441 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -240,7 +240,7 @@ protected A createAggregator(Query query, } protected SearchContext createSearchContext(IndexSearcher indexSearcher, IndexSettings indexSettings) { - Engine.Searcher searcher = new Engine.Searcher("aggregator_test", indexSearcher); + Engine.Searcher searcher = new Engine.Searcher("aggregator_test", indexSearcher, logger); QueryCache queryCache = new DisabledQueryCache(indexSettings); QueryCachingPolicy queryCachingPolicy = new QueryCachingPolicy() { @Override diff --git a/test/framework/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java b/test/framework/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java index cf2d69e36d529..0dbdaa55e3315 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java +++ b/test/framework/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java @@ -38,10 +38,8 @@ class AssertingSearcher extends 
Engine.Searcher { private final Logger logger; private final AtomicBoolean closed = new AtomicBoolean(false); - AssertingSearcher(IndexSearcher indexSearcher, final Engine.Searcher wrappedSearcher, - ShardId shardId, - Logger logger) { - super(wrappedSearcher.source(), indexSearcher); + AssertingSearcher(IndexSearcher indexSearcher, final Engine.Searcher wrappedSearcher, ShardId shardId, Logger logger) { + super(wrappedSearcher.source(), indexSearcher, s -> {throw new AssertionError();}, logger); // we only use the given index searcher here instead of the IS of the wrapped searcher. the IS might be a wrapped searcher // with a wrapped reader. this.wrappedSearcher = wrappedSearcher; @@ -52,11 +50,6 @@ class AssertingSearcher extends Engine.Searcher { "IndexReader#getRefCount() was [" + initialRefCount + "] expected a value > [0] - reader is already closed"; } - @Override - public String source() { - return wrappedSearcher.source(); - } - @Override public void close() { synchronized (lock) { From a15b1b97d22d63fd21c888b576bd130db8680af7 Mon Sep 17 00:00:00 2001 From: Alexander Reelsen Date: Thu, 27 Sep 2018 09:40:15 +0200 Subject: [PATCH 16/25] Watcher: Reduce script cache churn by checking for mustache tags (#33978) Watcher is using a lot of so called TextTemplate fields in a watch definition, which can use mustache to insert the watch id for example. For the user it is non-obvious which field is just a string field or which field is a text template. This also means, that for every such field, we currently do a script compilation, even if the field does not contain any mustache syntax. This will lead to an increased script cache churn, because those compiled scripts (that only contain a string), will evict other scripts. On top of that, this also means that an unneeded compilation has happened, instead of returning that string immediately. 
The usages of mustache templating are in all of the actions (most of the time far more than one compilation) as well as most of the inputs. Especially when running a lot of watches in parallel, this will reduce execution times and help reuse of real scripts. --- .../common/text/TextTemplateEngine.java | 5 +++ .../common/text/TextTemplateTests.java | 43 +++++++++++++++++-- 2 files changed, 45 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateEngine.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateEngine.java index 7b87a9e87a542..d2159fd572f5d 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateEngine.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateEngine.java @@ -35,6 +35,11 @@ public String render(TextTemplate textTemplate, Map model) { String mediaType = compileParams(detectContentType(template)); template = trimContentType(textTemplate); + int indexStartMustacheExpression = template.indexOf("{{"); + if (indexStartMustacheExpression == -1) { + return template; + } + Map mergedModel = new HashMap<>(); if (textTemplate.getParams() != null) { mergedModel.putAll(textTemplate.getParams()); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateTests.java index 0e084af23e1fb..002d833c20913 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateTests.java @@ -21,6 +21,7 @@ import java.util.Collections; import java.util.HashMap; +import java.util.Locale; import java.util.Map; import static java.util.Collections.singletonMap; 
@@ -31,7 +32,10 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; +import static org.mockito.Matchers.any; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyZeroInteractions; import static org.mockito.Mockito.when; public class TextTemplateTests extends ESTestCase { @@ -47,7 +51,7 @@ public void init() throws Exception { } public void testRender() throws Exception { - String templateText = "_template"; + String templateText = "{{_template}}"; Map params = singletonMap("param_key", "param_val"); Map model = singletonMap("model_key", "model_val"); Map merged = new HashMap<>(params); @@ -72,7 +76,7 @@ public String execute() { } public void testRenderOverridingModel() throws Exception { - String templateText = "_template"; + String templateText = "{{_template}}"; Map params = singletonMap("key", "param_val"); Map model = singletonMap("key", "model_val"); ScriptType type = randomFrom(ScriptType.values()); @@ -94,7 +98,7 @@ public String execute() { } public void testRenderDefaults() throws Exception { - String templateText = "_template"; + String templateText = "{{_template}}"; Map model = singletonMap("key", "model_val"); TemplateScript.Factory compiledTemplate = templateParams -> @@ -113,6 +117,39 @@ public String execute() { assertThat(engine.render(template, model), is("rendered_text")); } + public void testDontInvokeScriptServiceOnNonMustacheText() { + assertNoCompilation("this is my text"); + assertScriptServiceInvoked("}}{{"); + assertScriptServiceInvoked("}}{{ctx.payload}}"); + } + + private void assertNoCompilation(String input) { + String output = engine.render(new TextTemplate(input), Collections.emptyMap()); + assertThat(input, is(output)); + verifyZeroInteractions(service); + } + + private void assertScriptServiceInvoked(final String input) { + ScriptService scriptService = 
mock(ScriptService.class); + TextTemplateEngine e = new TextTemplateEngine(Settings.EMPTY, scriptService); + + TemplateScript.Factory compiledTemplate = templateParams -> + new TemplateScript(templateParams) { + @Override + public String execute() { + return input.toUpperCase(Locale.ROOT); + } + }; + + when(scriptService.compile(new Script(ScriptType.INLINE, lang, input, + Collections.singletonMap("content_type", "text/plain"), Collections.emptyMap()), Watcher.SCRIPT_TEMPLATE_CONTEXT)) + .thenReturn(compiledTemplate); + + String output = e.render(new TextTemplate(input), Collections.emptyMap()); + verify(scriptService).compile(any(), any()); + assertThat(output, is(input.toUpperCase(Locale.ROOT))); + } + public void testParser() throws Exception { ScriptType type = randomScriptType(); TextTemplate template = From 53d10bb3b2ab1c575c52a01dc45d94ccda63f094 Mon Sep 17 00:00:00 2001 From: Yogesh Gaikwad <902768+bizybot@users.noreply.github.com> Date: Thu, 27 Sep 2018 18:40:52 +1000 Subject: [PATCH 17/25] [HLRC] Support for role mapper expression dsl (#33745) This commit adds support for role mapping expression dsl. Functionally it is similar to what we have on the server side except for the rule evaluation which is not required on the client. The role mapper expression can either be field expression or composite expression of one or more expressions. Role mapper expression parser is used to parse JSON DSL to list of expressions. 
This forms the base for role mapping APIs (get, post/put and delete) --- .../expressiondsl/RoleMapperExpression.java | 30 +++ .../expressions/AllRoleMapperExpression.java | 55 ++++++ .../expressions/AnyRoleMapperExpression.java | 55 ++++++ .../CompositeRoleMapperExpression.java | 100 ++++++++++ .../expressions/CompositeType.java | 59 ++++++ .../ExceptRoleMapperExpression.java | 47 +++++ .../fields/FieldRoleMapperExpression.java | 122 ++++++++++++ .../parser/RoleMapperExpressionParser.java | 180 ++++++++++++++++++ .../RoleMapperExpressionDslTests.java | 97 ++++++++++ .../RoleMapperExpressionParserTests.java | 129 +++++++++++++ 10 files changed, 874 insertions(+) create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/RoleMapperExpression.java create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/AllRoleMapperExpression.java create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/AnyRoleMapperExpression.java create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/CompositeRoleMapperExpression.java create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/CompositeType.java create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/ExceptRoleMapperExpression.java create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/fields/FieldRoleMapperExpression.java create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/parser/RoleMapperExpressionParser.java create mode 100644 
client/rest-high-level/src/test/java/org/elasticsearch/client/security/support/expressiondsl/RoleMapperExpressionDslTests.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/security/support/expressiondsl/parser/RoleMapperExpressionParserTests.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/RoleMapperExpression.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/RoleMapperExpression.java new file mode 100644 index 0000000000000..10c0d0911ba55 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/RoleMapperExpression.java @@ -0,0 +1,30 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.security.support.expressiondsl; + +import org.elasticsearch.common.xcontent.ToXContentObject; + +/** + * Implementations of this interface represent an expression used for user role mapping + * that can later be resolved to a boolean value. 
+ */ +public interface RoleMapperExpression extends ToXContentObject { + +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/AllRoleMapperExpression.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/AllRoleMapperExpression.java new file mode 100644 index 0000000000000..b5cbe4d2e425a --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/AllRoleMapperExpression.java @@ -0,0 +1,55 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.security.support.expressiondsl.expressions; + +import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression; + +import java.util.ArrayList; +import java.util.List; + +/** + * An expression that evaluates to true if-and-only-if all its children + * evaluate to true. + * An all expression with no children is always true. 
+ */ +public final class AllRoleMapperExpression extends CompositeRoleMapperExpression { + + private AllRoleMapperExpression(String name, RoleMapperExpression[] elements) { + super(name, elements); + } + + public static Builder builder() { + return new Builder(); + } + + public static final class Builder { + private List elements = new ArrayList<>(); + + public Builder addExpression(final RoleMapperExpression expression) { + assert expression != null : "expression cannot be null"; + elements.add(expression); + return this; + } + + public AllRoleMapperExpression build() { + return new AllRoleMapperExpression(CompositeType.ALL.getName(), elements.toArray(new RoleMapperExpression[0])); + } + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/AnyRoleMapperExpression.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/AnyRoleMapperExpression.java new file mode 100644 index 0000000000000..7632a071bd1c2 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/AnyRoleMapperExpression.java @@ -0,0 +1,55 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.security.support.expressiondsl.expressions; + +import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression; + +import java.util.ArrayList; +import java.util.List; + +/** + * An expression that evaluates to true if at least one of its children + * evaluate to true. + * An any expression with no children is never true. + */ +public final class AnyRoleMapperExpression extends CompositeRoleMapperExpression { + + private AnyRoleMapperExpression(String name, RoleMapperExpression[] elements) { + super(name, elements); + } + + public static Builder builder() { + return new Builder(); + } + + public static final class Builder { + private List elements = new ArrayList<>(); + + public Builder addExpression(final RoleMapperExpression expression) { + assert expression != null : "expression cannot be null"; + elements.add(expression); + return this; + } + + public AnyRoleMapperExpression build() { + return new AnyRoleMapperExpression(CompositeType.ANY.getName(), elements.toArray(new RoleMapperExpression[0])); + } + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/CompositeRoleMapperExpression.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/CompositeRoleMapperExpression.java new file mode 100644 index 0000000000000..2519c59b68846 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/CompositeRoleMapperExpression.java @@ -0,0 +1,100 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.security.support.expressiondsl.expressions; + +import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +/** + * Expression of role mapper expressions which can be combined by operators like AND, OR + *

+ * Expression builder example: + *

+ * {@code
+ * final RoleMapperExpression allExpression = AllRoleMapperExpression.builder()
+                    .addExpression(AnyRoleMapperExpression.builder()
+                            .addExpression(FieldRoleMapperExpression.ofUsername("user1@example.org"))
+                            .addExpression(FieldRoleMapperExpression.ofUsername("user2@example.org"))
+                            .build())
+                    .addExpression(FieldRoleMapperExpression.ofMetadata("metadata.location", "AMER"))
+                    .addExpression(new ExceptRoleMapperExpression(FieldRoleMapperExpression.ofUsername("user3@example.org")))
+                    .build();
+ * }
+ * 
+ */ +public abstract class CompositeRoleMapperExpression implements RoleMapperExpression { + private final String name; + private final List elements; + + CompositeRoleMapperExpression(final String name, final RoleMapperExpression... elements) { + assert name != null : "field name cannot be null"; + assert elements != null : "at least one field expression is required"; + this.name = name; + this.elements = Collections.unmodifiableList(Arrays.asList(elements)); + } + + public String getName() { + return this.getName(); + } + + public List getElements() { + return elements; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + final CompositeRoleMapperExpression that = (CompositeRoleMapperExpression) o; + if (Objects.equals(this.getName(), that.getName()) == false) { + return false; + } + return Objects.equals(this.getElements(), that.getElements()); + } + + @Override + public int hashCode() { + return Objects.hash(name, elements); + } + + @Override + public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { + builder.startObject(); + builder.startArray(name); + for (RoleMapperExpression e : elements) { + e.toXContent(builder, params); + } + builder.endArray(); + return builder.endObject(); + } + +} + diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/CompositeType.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/CompositeType.java new file mode 100644 index 0000000000000..1d6c8aea12263 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/CompositeType.java @@ -0,0 +1,59 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.security.support.expressiondsl.expressions; + +import org.elasticsearch.common.ParseField; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +public enum CompositeType { + + ANY("any"), ALL("all"), EXCEPT("except"); + + private static Map nameToType = Collections.unmodifiableMap(initialize()); + private ParseField field; + + CompositeType(String name) { + this.field = new ParseField(name); + } + + public String getName() { + return field.getPreferredName(); + } + + public ParseField getParseField() { + return field; + } + + public static CompositeType fromName(String name) { + return nameToType.get(name); + } + + private static Map initialize() { + Map map = new HashMap<>(); + for (CompositeType field : values()) { + map.put(field.getName(), field); + } + return map; + } + +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/ExceptRoleMapperExpression.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/ExceptRoleMapperExpression.java new file mode 100644 index 0000000000000..c2cad0d18dad1 --- /dev/null +++ 
b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/ExceptRoleMapperExpression.java @@ -0,0 +1,47 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.security.support.expressiondsl.expressions; + +import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; + +/** + * A negating expression. That is, this expression evaluates to true if-and-only-if + * its delegate expression evaluate to false. + * Syntactically, except expressions are intended to be children of all + * expressions ({@link AllRoleMapperExpression}). 
+ */ +public final class ExceptRoleMapperExpression extends CompositeRoleMapperExpression { + + public ExceptRoleMapperExpression(final RoleMapperExpression expression) { + super(CompositeType.EXCEPT.getName(), expression); + } + + @Override + public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { + builder.startObject(); + builder.field(CompositeType.EXCEPT.getName()); + builder.value(getElements().get(0)); + return builder.endObject(); + } + +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/fields/FieldRoleMapperExpression.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/fields/FieldRoleMapperExpression.java new file mode 100644 index 0000000000000..c96ac3cc5b5ec --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/fields/FieldRoleMapperExpression.java @@ -0,0 +1,122 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client.security.support.expressiondsl.fields; + +import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +/** + * An expression that evaluates to true if a field (map element) matches + * the provided values. A field expression may have more than one provided value, in which + * case the expression is true if any of the values are matched. + *

+ * Expression builder example: + *

+ * {@code
+ * final RoleMapperExpression usernameExpression = FieldRoleMapperExpression.ofUsername("user1@example.org");
+ * }
+ * 
+ */ +public class FieldRoleMapperExpression implements RoleMapperExpression { + + private final String field; + private final List values; + + public FieldRoleMapperExpression(final String field, final Object... values) { + if (field == null || field.isEmpty()) { + throw new IllegalArgumentException("null or empty field name (" + field + ")"); + } + if (values == null || values.length == 0) { + throw new IllegalArgumentException("null or empty values (" + values + ")"); + } + this.field = field; + this.values = Collections.unmodifiableList(Arrays.asList(values)); + } + + public String getField() { + return field; + } + + public List getValues() { + return values; + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + final FieldRoleMapperExpression that = (FieldRoleMapperExpression) o; + + return Objects.equals(this.getField(), that.getField()) && Objects.equals(this.getValues(), that.getValues()); + } + + @Override + public int hashCode() { + int result = field.hashCode(); + result = 31 * result + values.hashCode(); + return result; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.startObject("field"); + builder.startArray(this.field); + for (Object value : values) { + builder.value(value); + } + builder.endArray(); + builder.endObject(); + return builder.endObject(); + } + + public static FieldRoleMapperExpression ofUsername(Object... values) { + return ofKeyValues("username", values); + } + + public static FieldRoleMapperExpression ofGroups(Object... values) { + return ofKeyValues("groups", values); + } + + public static FieldRoleMapperExpression ofDN(Object... values) { + return ofKeyValues("dn", values); + } + + public static FieldRoleMapperExpression ofMetadata(String key, Object... 
values) { + if (key.startsWith("metadata.") == false) { + throw new IllegalArgumentException("metadata key must have prefix 'metadata.'"); + } + return ofKeyValues(key, values); + } + + public static FieldRoleMapperExpression ofKeyValues(String key, Object... values) { + return new FieldRoleMapperExpression(key, values); + } + +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/parser/RoleMapperExpressionParser.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/parser/RoleMapperExpressionParser.java new file mode 100644 index 0000000000000..98de4f4c2092c --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/parser/RoleMapperExpressionParser.java @@ -0,0 +1,180 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client.security.support.expressiondsl.parser; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression; +import org.elasticsearch.client.security.support.expressiondsl.expressions.AllRoleMapperExpression; +import org.elasticsearch.client.security.support.expressiondsl.expressions.AnyRoleMapperExpression; +import org.elasticsearch.client.security.support.expressiondsl.expressions.CompositeType; +import org.elasticsearch.client.security.support.expressiondsl.expressions.ExceptRoleMapperExpression; +import org.elasticsearch.client.security.support.expressiondsl.fields.FieldRoleMapperExpression; +import org.elasticsearch.common.CheckedFunction; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +/** + * Parses the JSON (XContent) based boolean expression DSL into a tree of + * {@link RoleMapperExpression} objects. + * Note: As this is client side parser, it mostly validates the structure of + * DSL being parsed it does not enforce rules + * like allowing "except" within "except" or "any" expressions. + */ +public final class RoleMapperExpressionParser { + public static final ParseField FIELD = new ParseField("field"); + + /** + * @param name The name of the expression tree within its containing object. + * Used to provide descriptive error messages. 
+ * @param parser A parser over the XContent (typically JSON) DSL + * representation of the expression + */ + public RoleMapperExpression parse(final String name, final XContentParser parser) throws IOException { + return parseRulesObject(name, parser); + } + + private RoleMapperExpression parseRulesObject(final String objectName, final XContentParser parser) + throws IOException { + // find the start of the DSL object + final XContentParser.Token token; + if (parser.currentToken() == null) { + token = parser.nextToken(); + } else { + token = parser.currentToken(); + } + if (token != XContentParser.Token.START_OBJECT) { + throw new ElasticsearchParseException("failed to parse rules expression. expected [{}] to be an object but found [{}] instead", + objectName, token); + } + + final String fieldName = fieldName(objectName, parser); + final RoleMapperExpression expr = parseExpression(parser, fieldName, objectName); + if (parser.nextToken() != XContentParser.Token.END_OBJECT) { + throw new ElasticsearchParseException("failed to parse rules expression. 
object [{}] contains multiple fields", objectName); + } + return expr; + } + + private RoleMapperExpression parseExpression(XContentParser parser, String field, String objectName) + throws IOException { + + if (CompositeType.ANY.getParseField().match(field, parser.getDeprecationHandler())) { + final AnyRoleMapperExpression.Builder builder = AnyRoleMapperExpression.builder(); + parseExpressionArray(CompositeType.ANY.getParseField(), parser).forEach(builder::addExpression); + return builder.build(); + } else if (CompositeType.ALL.getParseField().match(field, parser.getDeprecationHandler())) { + final AllRoleMapperExpression.Builder builder = AllRoleMapperExpression.builder(); + parseExpressionArray(CompositeType.ALL.getParseField(), parser).forEach(builder::addExpression); + return builder.build(); + } else if (FIELD.match(field, parser.getDeprecationHandler())) { + return parseFieldExpression(parser); + } else if (CompositeType.EXCEPT.getParseField().match(field, parser.getDeprecationHandler())) { + return parseExceptExpression(parser); + } else { + throw new ElasticsearchParseException("failed to parse rules expression. field [{}] is not recognised in object [{}]", field, + objectName); + } + } + + private RoleMapperExpression parseFieldExpression(XContentParser parser) throws IOException { + checkStartObject(parser); + final String fieldName = fieldName(FIELD.getPreferredName(), parser); + + final List values; + if (parser.nextToken() == XContentParser.Token.START_ARRAY) { + values = parseArray(FIELD, parser, this::parseFieldValue); + } else { + values = Collections.singletonList(parseFieldValue(parser)); + } + if (parser.nextToken() != XContentParser.Token.END_OBJECT) { + throw new ElasticsearchParseException("failed to parse rules expression. 
object [{}] contains multiple fields", + FIELD.getPreferredName()); + } + + return FieldRoleMapperExpression.ofKeyValues(fieldName, values.toArray()); + } + + private RoleMapperExpression parseExceptExpression(XContentParser parser) throws IOException { + checkStartObject(parser); + return new ExceptRoleMapperExpression(parseRulesObject(CompositeType.EXCEPT.getName(), parser)); + } + + private void checkStartObject(XContentParser parser) throws IOException { + final XContentParser.Token token = parser.nextToken(); + if (token != XContentParser.Token.START_OBJECT) { + throw new ElasticsearchParseException("failed to parse rules expression. expected an object but found [{}] instead", token); + } + } + + private String fieldName(String objectName, XContentParser parser) throws IOException { + if (parser.nextToken() != XContentParser.Token.FIELD_NAME) { + throw new ElasticsearchParseException("failed to parse rules expression. object [{}] does not contain any fields", objectName); + } + String parsedFieldName = parser.currentName(); + return parsedFieldName; + } + + private List parseExpressionArray(ParseField field, XContentParser parser) + throws IOException { + parser.nextToken(); // parseArray requires that the parser is positioned + // at the START_ARRAY token + return parseArray(field, parser, p -> parseRulesObject(field.getPreferredName(), p)); + } + + private List parseArray(ParseField field, XContentParser parser, CheckedFunction elementParser) + throws IOException { + final XContentParser.Token token = parser.currentToken(); + if (token == XContentParser.Token.START_ARRAY) { + List list = new ArrayList<>(); + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { + list.add(elementParser.apply(parser)); + } + return list; + } else { + throw new ElasticsearchParseException("failed to parse rules expression. 
field [{}] requires an array", field); + } + } + + private Object parseFieldValue(XContentParser parser) throws IOException { + switch (parser.currentToken()) { + case VALUE_STRING: + return parser.text(); + + case VALUE_BOOLEAN: + return parser.booleanValue(); + + case VALUE_NUMBER: + return parser.longValue(); + + case VALUE_NULL: + return null; + + default: + throw new ElasticsearchParseException("failed to parse rules expression. expected a field value but found [{}] instead", parser + .currentToken()); + } + } + +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/security/support/expressiondsl/RoleMapperExpressionDslTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/support/expressiondsl/RoleMapperExpressionDslTests.java new file mode 100644 index 0000000000000..df94640f172dd --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/support/expressiondsl/RoleMapperExpressionDslTests.java @@ -0,0 +1,97 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client.security.support.expressiondsl; + +import org.elasticsearch.client.security.support.expressiondsl.expressions.AllRoleMapperExpression; +import org.elasticsearch.client.security.support.expressiondsl.expressions.AnyRoleMapperExpression; +import org.elasticsearch.client.security.support.expressiondsl.expressions.ExceptRoleMapperExpression; +import org.elasticsearch.client.security.support.expressiondsl.fields.FieldRoleMapperExpression; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Date; + +import static org.hamcrest.Matchers.equalTo; + +public class RoleMapperExpressionDslTests extends ESTestCase { + + public void testRoleMapperExpressionToXContentType() throws IOException { + + final RoleMapperExpression allExpression = AllRoleMapperExpression.builder() + .addExpression(AnyRoleMapperExpression.builder() + .addExpression(FieldRoleMapperExpression.ofDN("*,ou=admin,dc=example,dc=com")) + .addExpression(FieldRoleMapperExpression.ofUsername("es-admin", "es-system")) + .build()) + .addExpression(FieldRoleMapperExpression.ofGroups("cn=people,dc=example,dc=com")) + .addExpression(new ExceptRoleMapperExpression(FieldRoleMapperExpression.ofMetadata("metadata.terminated_date", new Date( + 1537145401027L)))) + .build(); + + final XContentBuilder builder = XContentFactory.jsonBuilder(); + allExpression.toXContent(builder, ToXContent.EMPTY_PARAMS); + final String output = Strings.toString(builder); + final String expected = + "{"+ + "\"all\":["+ + "{"+ + "\"any\":["+ + "{"+ + "\"field\":{"+ + "\"dn\":[\"*,ou=admin,dc=example,dc=com\"]"+ + "}"+ + "},"+ + "{"+ + "\"field\":{"+ + "\"username\":["+ + "\"es-admin\","+ + "\"es-system\""+ + "]"+ + "}"+ + "}"+ + "]"+ + "},"+ + "{"+ + 
"\"field\":{"+ + "\"groups\":[\"cn=people,dc=example,dc=com\"]"+ + "}"+ + "},"+ + "{"+ + "\"except\":{"+ + "\"field\":{"+ + "\"metadata.terminated_date\":[\"2018-09-17T00:50:01.027Z\"]"+ + "}"+ + "}"+ + "}"+ + "]"+ + "}"; + + assertThat(expected, equalTo(output)); + } + + public void testFieldRoleMapperExpressionThrowsExceptionForMissingMetadataPrefix() { + final IllegalArgumentException ile = expectThrows(IllegalArgumentException.class, () -> FieldRoleMapperExpression.ofMetadata( + "terminated_date", new Date(1537145401027L))); + assertThat(ile.getMessage(), equalTo("metadata key must have prefix 'metadata.'")); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/security/support/expressiondsl/parser/RoleMapperExpressionParserTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/support/expressiondsl/parser/RoleMapperExpressionParserTests.java new file mode 100644 index 0000000000000..24ed5684fa856 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/support/expressiondsl/parser/RoleMapperExpressionParserTests.java @@ -0,0 +1,129 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client.security.support.expressiondsl.parser; + +import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression; +import org.elasticsearch.client.security.support.expressiondsl.expressions.CompositeRoleMapperExpression; +import org.elasticsearch.client.security.support.expressiondsl.fields.FieldRoleMapperExpression; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.DeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Collections; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.iterableWithSize; + +public class RoleMapperExpressionParserTests extends ESTestCase { + + public void testParseSimpleFieldExpression() throws Exception { + String json = "{ \"field\": { \"username\" : [\"*@shield.gov\"] } }"; + FieldRoleMapperExpression field = checkExpressionType(parse(json), FieldRoleMapperExpression.class); + assertThat(field.getField(), equalTo("username")); + assertThat(field.getValues(), iterableWithSize(1)); + assertThat(field.getValues().get(0), equalTo("*@shield.gov")); + + assertThat(toJson(field), equalTo(json.replaceAll("\\s", ""))); + } + + public void testParseComplexExpression() throws Exception { + String json = "{ \"any\": [" + + " { \"field\": { \"username\" : \"*@shield.gov\" } }, " + + " { \"all\": [" + + " { \"field\": { \"username\" : \"/.*\\\\@avengers\\\\.(net|org)/\" } }, " + + " { \"field\": { \"groups\" : [ \"admin\", \"operators\" ] } }, " + + " { \"except\":" + + " { \"field\": { \"groups\" : \"disavowed\" } }" + + " }" + + " ] 
}" + + "] }"; + final RoleMapperExpression expr = parse(json); + + assertThat(expr, instanceOf(CompositeRoleMapperExpression.class)); + CompositeRoleMapperExpression any = (CompositeRoleMapperExpression) expr; + + assertThat(any.getElements(), iterableWithSize(2)); + + final FieldRoleMapperExpression fieldShield = checkExpressionType(any.getElements().get(0), + FieldRoleMapperExpression.class); + assertThat(fieldShield.getField(), equalTo("username")); + assertThat(fieldShield.getValues(), iterableWithSize(1)); + assertThat(fieldShield.getValues().get(0), equalTo("*@shield.gov")); + + final CompositeRoleMapperExpression all = checkExpressionType(any.getElements().get(1), + CompositeRoleMapperExpression.class); + assertThat(all.getElements(), iterableWithSize(3)); + + final FieldRoleMapperExpression fieldAvengers = checkExpressionType(all.getElements().get(0), + FieldRoleMapperExpression.class); + assertThat(fieldAvengers.getField(), equalTo("username")); + assertThat(fieldAvengers.getValues(), iterableWithSize(1)); + assertThat(fieldAvengers.getValues().get(0), equalTo("/.*\\@avengers\\.(net|org)/")); + + final FieldRoleMapperExpression fieldGroupsAdmin = checkExpressionType(all.getElements().get(1), + FieldRoleMapperExpression.class); + assertThat(fieldGroupsAdmin.getField(), equalTo("groups")); + assertThat(fieldGroupsAdmin.getValues(), iterableWithSize(2)); + assertThat(fieldGroupsAdmin.getValues().get(0), equalTo("admin")); + assertThat(fieldGroupsAdmin.getValues().get(1), equalTo("operators")); + + final CompositeRoleMapperExpression except = checkExpressionType(all.getElements().get(2), + CompositeRoleMapperExpression.class); + final FieldRoleMapperExpression fieldDisavowed = checkExpressionType(except.getElements().get(0), + FieldRoleMapperExpression.class); + assertThat(fieldDisavowed.getField(), equalTo("groups")); + assertThat(fieldDisavowed.getValues(), iterableWithSize(1)); + assertThat(fieldDisavowed.getValues().get(0), equalTo("disavowed")); + + } + + 
private String toJson(final RoleMapperExpression expr) throws IOException { + final XContentBuilder builder = XContentFactory.jsonBuilder(); + expr.toXContent(builder, ToXContent.EMPTY_PARAMS); + final String output = Strings.toString(builder); + return output; + } + + private T checkExpressionType(RoleMapperExpression expr, Class type) { + assertThat(expr, instanceOf(type)); + return type.cast(expr); + } + + private RoleMapperExpression parse(String json) throws IOException { + return new RoleMapperExpressionParser().parse("rules", XContentType.JSON.xContent().createParser(new NamedXContentRegistry( + Collections.emptyList()), new DeprecationHandler() { + @Override + public void usedDeprecatedName(String usedName, String modernName) { + } + + @Override + public void usedDeprecatedField(String usedName, String replacedWith) { + } + }, json)); + } + +} From 1cc53b598868337c184601f18db08071791d4290 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Thu, 27 Sep 2018 10:02:29 +0100 Subject: [PATCH 18/25] Bad regex in CORS settings should throw a nicer error (#34035) Currently a bad regex in CORS settings throws a PatternSyntaxException, which then bubbles up through the bootstrap code, meaning users have to parse a stack trace to work out where the problem is. We should instead catch this exception and rethrow with a more useful error message. 
--- .../http/netty4/Netty4HttpServerTransport.java | 16 +++++++++++----- .../netty4/Netty4HttpServerTransportTests.java | 13 +++++++++++++ .../http/nio/NioHttpServerTransport.java | 16 +++++++++++----- .../http/nio/NioHttpServerTransportTests.java | 13 +++++++++++++ 4 files changed, 48 insertions(+), 10 deletions(-) diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java index 5f2e8b8c87128..a73b505728025 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java @@ -48,6 +48,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; @@ -68,6 +69,7 @@ import java.util.Arrays; import java.util.concurrent.TimeUnit; import java.util.regex.Pattern; +import java.util.regex.PatternSyntaxException; import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadFactory; import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_CREDENTIALS; @@ -241,11 +243,15 @@ static Netty4CorsConfig buildCorsConfig(Settings settings) { } else if (origin.equals(ANY_ORIGIN)) { builder = Netty4CorsConfigBuilder.forAnyOrigin(); } else { - Pattern p = RestUtils.checkCorsSettingForRegex(origin); - if (p == null) { - builder = Netty4CorsConfigBuilder.forOrigins(RestUtils.corsSettingAsArray(origin)); - } else { - builder = Netty4CorsConfigBuilder.forPattern(p); + try { + Pattern p = RestUtils.checkCorsSettingForRegex(origin); + if (p 
== null) { + builder = Netty4CorsConfigBuilder.forOrigins(RestUtils.corsSettingAsArray(origin)); + } else { + builder = Netty4CorsConfigBuilder.forPattern(p); + } + } catch (PatternSyntaxException e) { + throw new SettingsException("Bad regex in [" + SETTING_CORS_ALLOW_ORIGIN.getKey() + "]: [" + origin + "]", e); } } if (SETTING_CORS_ALLOW_CREDENTIALS.get(settings)) { diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java index 1c3c71d710d17..63e38823acb31 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java @@ -44,6 +44,7 @@ import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; @@ -75,6 +76,7 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; +import java.util.regex.PatternSyntaxException; import java.util.stream.Collectors; import static org.elasticsearch.common.Strings.collectionToDelimitedString; @@ -148,6 +150,17 @@ public void testCorsConfigWithDefaults() { assertFalse(corsConfig.isCredentialsAllowed()); } + public void testCorsConfigWithBadRegex() { + final Settings settings = Settings.builder() + .put(SETTING_CORS_ENABLED.getKey(), true) + .put(SETTING_CORS_ALLOW_ORIGIN.getKey(), "/[*/") + .put(SETTING_CORS_ALLOW_CREDENTIALS.getKey(), true) + .build(); + SettingsException e = 
expectThrows(SettingsException.class, () -> Netty4HttpServerTransport.buildCorsConfig(settings)); + assertThat(e.getMessage(), containsString("Bad regex in [http.cors.allow-origin]: [/[*/]")); + assertThat(e.getCause(), instanceOf(PatternSyntaxException.class)); + } + /** * Test that {@link Netty4HttpServerTransport} supports the "Expect: 100-continue" HTTP header * @throws InterruptedException if the client communication with the server is interrupted diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java index 9c672c1caf15a..a7f8768bb691f 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.recycler.Recycler; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.PageCacheRecycler; @@ -57,6 +58,7 @@ import java.util.Arrays; import java.util.function.Consumer; import java.util.regex.Pattern; +import java.util.regex.PatternSyntaxException; import static org.elasticsearch.common.settings.Setting.intSetting; import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadFactory; @@ -176,11 +178,15 @@ static NioCorsConfig buildCorsConfig(Settings settings) { } else if (origin.equals(ANY_ORIGIN)) { builder = NioCorsConfigBuilder.forAnyOrigin(); } else { - Pattern p = RestUtils.checkCorsSettingForRegex(origin); - if (p == null) { - builder = NioCorsConfigBuilder.forOrigins(RestUtils.corsSettingAsArray(origin)); - } else { - builder = NioCorsConfigBuilder.forPattern(p); + try { + 
Pattern p = RestUtils.checkCorsSettingForRegex(origin); + if (p == null) { + builder = NioCorsConfigBuilder.forOrigins(RestUtils.corsSettingAsArray(origin)); + } else { + builder = NioCorsConfigBuilder.forPattern(p); + } + } catch (PatternSyntaxException e) { + throw new SettingsException("Bad regex in [" + SETTING_CORS_ALLOW_ORIGIN.getKey() + "]: [" + origin + "]", e); } } if (SETTING_CORS_ALLOW_CREDENTIALS.get(settings)) { diff --git a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpServerTransportTests.java b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpServerTransportTests.java index 8acec830f11b9..13b8e60336e02 100644 --- a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpServerTransportTests.java +++ b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpServerTransportTests.java @@ -37,6 +37,7 @@ import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.MockBigArrays; @@ -65,6 +66,7 @@ import java.util.HashSet; import java.util.Set; import java.util.concurrent.atomic.AtomicReference; +import java.util.regex.PatternSyntaxException; import java.util.stream.Collectors; import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_CREDENTIALS; @@ -139,6 +141,17 @@ public void testCorsConfigWithDefaults() { assertFalse(corsConfig.isCredentialsAllowed()); } + public void testCorsConfigWithBadRegex() { + final Settings settings = Settings.builder() + .put(SETTING_CORS_ENABLED.getKey(), true) + .put(SETTING_CORS_ALLOW_ORIGIN.getKey(), "/[*/") + .put(SETTING_CORS_ALLOW_CREDENTIALS.getKey(), true) + .build(); + SettingsException e = 
expectThrows(SettingsException.class, () -> NioHttpServerTransport.buildCorsConfig(settings)); + assertThat(e.getMessage(), containsString("Bad regex in [http.cors.allow-origin]: [/[*/]")); + assertThat(e.getCause(), instanceOf(PatternSyntaxException.class)); + } + /** * Test that {@link NioHttpServerTransport} supports the "Expect: 100-continue" HTTP header * @throws InterruptedException if the client communication with the server is interrupted From cb4cdf17f0fc09c5066c8fadc64becf0b5d3cc73 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 27 Sep 2018 11:10:45 +0200 Subject: [PATCH 19/25] Update MovAvgIT AwaitsFix bug url --- .../search/aggregations/pipeline/moving/avg/MovAvgIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java index e9483f8be1fa1..41bbf053ff18b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java @@ -847,7 +847,7 @@ public void testNegativePrediction() { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/34098") + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/34046") public void testHoltWintersNotEnoughData() { Client client = client(); expectThrows(SearchPhaseExecutionException.class, () -> client.prepareSearch("idx").setTypes("type") From 269ae0bc15e312e91beb497191e44516197b66fa Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Thu, 27 Sep 2018 14:19:08 +0200 Subject: [PATCH 20/25] Handle MatchNoDocsQuery in span query wrappers (#34106) * Handle MatchNoDocsQuery in span query wrappers This change adds a new SpanMatchNoDocsQuery query that replaces MatchNoDocsQuery in the span query wrappers. 
The `wildcard` query now returns MatchNoDocsQuery if the target field is not in the mapping (#34093) so we need the equivalent span query in order to be able to pass it to other span wrappers. Closes #34105 --- .../lucene/queries/SpanMatchNoDocsQuery.java | 87 +++++++++++++ .../index/query/FuzzyQueryBuilder.java | 1 + .../index/query/MultiTermQueryBuilder.java | 5 +- .../index/query/PrefixQueryBuilder.java | 1 + .../index/query/RangeQueryBuilder.java | 1 + .../index/query/RegexpQueryBuilder.java | 1 + .../query/SpanMultiTermQueryBuilder.java | 9 +- .../index/query/WildcardQueryBuilder.java | 1 + .../queries/SpanMatchNoDocsQueryTests.java | 117 ++++++++++++++++++ .../query/SpanMultiTermQueryBuilderTests.java | 11 +- .../query/SpanNearQueryBuilderTests.java | 2 + .../test/AbstractQueryTestCase.java | 1 - 12 files changed, 233 insertions(+), 4 deletions(-) create mode 100644 server/src/main/java/org/apache/lucene/queries/SpanMatchNoDocsQuery.java create mode 100644 server/src/test/java/org/apache/lucene/queries/SpanMatchNoDocsQueryTests.java diff --git a/server/src/main/java/org/apache/lucene/queries/SpanMatchNoDocsQuery.java b/server/src/main/java/org/apache/lucene/queries/SpanMatchNoDocsQuery.java new file mode 100644 index 0000000000000..55b78739d1d9a --- /dev/null +++ b/server/src/main/java/org/apache/lucene/queries/SpanMatchNoDocsQuery.java @@ -0,0 +1,87 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.lucene.queries; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.TermStates; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.ScoreMode; +import org.apache.lucene.search.spans.SpanQuery; +import org.apache.lucene.search.spans.SpanWeight; +import org.apache.lucene.search.spans.Spans; + +import java.io.IOException; +import java.util.Collections; +import java.util.Map; +import java.util.Set; + +/** + * A {@link SpanQuery} that matches no documents. 
+ */ +public class SpanMatchNoDocsQuery extends SpanQuery { + private final String field; + private final String reason; + + public SpanMatchNoDocsQuery(String field, String reason) { + this.field = field; + this.reason = reason; + } + + @Override + public String getField() { + return field; + } + + @Override + public String toString(String field) { + return "SpanMatchNoDocsQuery(\"" + reason + "\")"; + } + + @Override + public boolean equals(Object o) { + return sameClassAs(o); + } + + @Override + public int hashCode() { + return classHash(); + } + + @Override + public SpanWeight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { + return new SpanWeight(this, searcher, Collections.emptyMap(), boost) { + @Override + public void extractTermStates(Map contexts) {} + + @Override + public Spans getSpans(LeafReaderContext ctx, Postings requiredPostings) { + return null; + } + + @Override + public void extractTerms(Set terms) {} + + @Override + public boolean isCacheable(LeafReaderContext ctx) { + return true; + } + }; + } +} diff --git a/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java index 379f583ef6082..93528bb952042 100644 --- a/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java @@ -181,6 +181,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeOptionalString(this.rewrite); } + @Override public String fieldName() { return this.fieldName; } diff --git a/server/src/main/java/org/elasticsearch/index/query/MultiTermQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MultiTermQueryBuilder.java index be9abfc5e44b6..ee9fa5b114b7d 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MultiTermQueryBuilder.java +++ 
b/server/src/main/java/org/elasticsearch/index/query/MultiTermQueryBuilder.java @@ -19,5 +19,8 @@ package org.elasticsearch.index.query; public interface MultiTermQueryBuilder extends QueryBuilder { - + /** + * Get the field name for this query. + */ + String fieldName(); } diff --git a/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java index c1cd99d712a5a..eacb2be100c98 100644 --- a/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java @@ -87,6 +87,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeOptionalString(rewrite); } + @Override public String fieldName() { return this.fieldName; } diff --git a/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java index d57e7cbaff457..756c6456a9f13 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java @@ -146,6 +146,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { /** * Get the field name for this query. */ + @Override public String fieldName() { return this.fieldName; } diff --git a/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java index 39d7c1e2cf016..472c101487443 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java @@ -104,6 +104,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { } /** Returns the field name used in this query. 
*/ + @Override public String fieldName() { return this.fieldName; } diff --git a/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java index 6ea068176b41e..22fca7d1d0b8f 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java @@ -21,9 +21,11 @@ import org.apache.lucene.index.Term; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.TermStates; +import org.apache.lucene.queries.SpanMatchNoDocsQuery; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; @@ -190,9 +192,14 @@ protected Query doToQuery(QueryShardContext context) throws IOException { break; } } - final SpanQuery spanQuery; // no MultiTermQuery extends SpanQuery, so SpanBoostQuery is not supported here assert subQuery instanceof SpanBoostQuery == false; + + if (subQuery instanceof MatchNoDocsQuery) { + return new SpanMatchNoDocsQuery(multiTermQueryBuilder.fieldName(), subQuery.toString()); + } + + final SpanQuery spanQuery; if (subQuery instanceof TermQuery) { /** * Text fields that index prefixes can rewrite prefix queries diff --git a/server/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java index 697e7ac864e85..0b855bd50a426 100644 --- a/server/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java @@ -96,6 +96,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { 
out.writeOptionalString(rewrite); } + @Override public String fieldName() { return fieldName; } diff --git a/server/src/test/java/org/apache/lucene/queries/SpanMatchNoDocsQueryTests.java b/server/src/test/java/org/apache/lucene/queries/SpanMatchNoDocsQueryTests.java new file mode 100644 index 0000000000000..6187fc1f7f6d9 --- /dev/null +++ b/server/src/test/java/org/apache/lucene/queries/SpanMatchNoDocsQueryTests.java @@ -0,0 +1,117 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.lucene.queries; + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.MockAnalyzer; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.Field; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryUtils; +import org.apache.lucene.search.ScoreDoc; +import org.apache.lucene.search.spans.SpanNearQuery; +import org.apache.lucene.search.spans.SpanOrQuery; +import org.apache.lucene.search.spans.SpanQuery; +import org.apache.lucene.search.spans.SpanTermQuery; +import org.apache.lucene.store.Directory; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; + +public class SpanMatchNoDocsQueryTests extends ESTestCase { + public void testSimple() throws Exception { + SpanMatchNoDocsQuery query = new SpanMatchNoDocsQuery("field", "a good reason"); + assertEquals(query.toString(), "SpanMatchNoDocsQuery(\"a good reason\")"); + Query rewrite = query.rewrite(null); + assertTrue(rewrite instanceof SpanMatchNoDocsQuery); + assertEquals(rewrite.toString(), "SpanMatchNoDocsQuery(\"a good reason\")"); + } + + public void testQuery() throws Exception { + Directory dir = newDirectory(); + Analyzer analyzer = new MockAnalyzer(random()); + IndexWriter iw = new IndexWriter(dir, + newIndexWriterConfig(analyzer).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy())); + addDoc("one", iw); + addDoc("two", iw); + addDoc("three", iw); + IndexReader ir = DirectoryReader.open(iw); + IndexSearcher searcher = new IndexSearcher(ir); + + Query query = new SpanMatchNoDocsQuery("unkwown", "field not found"); + assertEquals(searcher.count(query), 0); + + ScoreDoc[] hits; + hits = searcher.search(query, 1000).scoreDocs; + assertEquals(0, hits.length); + 
assertEquals(query.toString(), "SpanMatchNoDocsQuery(\"field not found\")"); + + SpanOrQuery orQuery = new SpanOrQuery( + new SpanMatchNoDocsQuery("unknown", "field not found"), + new SpanTermQuery(new Term("unknown", "one")) + ); + assertEquals(searcher.count(orQuery), 0); + hits = searcher.search(orQuery, 1000).scoreDocs; + assertEquals(0, hits.length); + + orQuery = new SpanOrQuery( + new SpanMatchNoDocsQuery("key", "a good reason"), + new SpanTermQuery(new Term("key", "one")) + ); + assertEquals(searcher.count(orQuery), 1); + hits = searcher.search(orQuery, 1000).scoreDocs; + assertEquals(1, hits.length); + Query rewrite = orQuery.rewrite(ir); + assertEquals(rewrite, orQuery); + + SpanNearQuery nearQuery = new SpanNearQuery( + new SpanQuery[] {new SpanMatchNoDocsQuery("same", ""), new SpanMatchNoDocsQuery("same", "")}, + 0, true); + assertEquals(searcher.count(nearQuery), 0); + hits = searcher.search(nearQuery, 1000).scoreDocs; + assertEquals(0, hits.length); + rewrite = nearQuery.rewrite(ir); + assertEquals(rewrite, nearQuery); + + iw.close(); + ir.close(); + dir.close(); + } + + public void testEquals() { + Query q1 = new SpanMatchNoDocsQuery("key1", "one"); + Query q2 = new SpanMatchNoDocsQuery("key2", "two"); + assertTrue(q1.equals(q2)); + QueryUtils.check(q1); + } + + private void addDoc(String text, IndexWriter iw) throws IOException { + Document doc = new Document(); + Field f = newTextField("key", text, Field.Store.YES); + doc.add(f); + iw.addDocument(doc); + } + +} diff --git a/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java index 7c459737c771c..a4bbd1989dabc 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java @@ -25,6 +25,7 @@ import org.apache.lucene.index.IndexReader; import 
org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.Term; +import org.apache.lucene.queries.SpanMatchNoDocsQuery; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.MultiTermQuery; @@ -81,6 +82,9 @@ protected SpanMultiTermQueryBuilder doCreateTestQueryBuilder() { @Override protected void doAssertLuceneQuery(SpanMultiTermQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException { + if (query instanceof SpanMatchNoDocsQuery) { + return; + } if (queryBuilder.innerQuery().boost() != AbstractQueryBuilder.DEFAULT_BOOST) { assertThat(query, instanceOf(SpanBoostQuery.class)); SpanBoostQuery boostQuery = (SpanBoostQuery) query; @@ -97,7 +101,7 @@ protected void doAssertLuceneQuery(SpanMultiTermQueryBuilder queryBuilder, Query } assertThat(multiTermQuery, either(instanceOf(MultiTermQuery.class)).or(instanceOf(TermQuery.class))); assertThat(spanMultiTermQueryWrapper.getWrappedQuery(), - equalTo(new SpanMultiTermQueryWrapper<>((MultiTermQuery)multiTermQuery).getWrappedQuery())); + equalTo(new SpanMultiTermQueryWrapper<>((MultiTermQuery) multiTermQuery).getWrappedQuery())); } public void testIllegalArgument() { @@ -154,6 +158,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws public void writeTo(StreamOutput out) throws IOException { } + + @Override + public String fieldName() { + return "foo"; + } } /** diff --git a/server/src/test/java/org/elasticsearch/index/query/SpanNearQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SpanNearQueryBuilderTests.java index 359793adcf6af..0e22f33db77c7 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SpanNearQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SpanNearQueryBuilderTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.query; +import org.apache.lucene.queries.SpanMatchNoDocsQuery; import 
org.apache.lucene.search.Query; import org.apache.lucene.search.spans.SpanBoostQuery; import org.apache.lucene.search.spans.SpanNearQuery; @@ -53,6 +54,7 @@ protected void doAssertLuceneQuery(SpanNearQueryBuilder queryBuilder, Query quer assertThat(query, either(instanceOf(SpanNearQuery.class)) .or(instanceOf(SpanTermQuery.class)) .or(instanceOf(SpanBoostQuery.class)) + .or(instanceOf(SpanMatchNoDocsQuery.class)) .or(instanceOf(MatchAllQueryBuilder.class))); if (query instanceof SpanNearQuery) { SpanNearQuery spanNearQuery = (SpanNearQuery) query; diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java index 8b1867cbcbb29..f71edfde84f0f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java @@ -406,7 +406,6 @@ protected boolean builderGeneratesCacheableQueries() { * Test creates the {@link Query} from the {@link QueryBuilder} under test and delegates the * assertions being made on the result to the implementing subclass. */ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/34105") public void testToQuery() throws IOException { for (int runs = 0; runs < NUMBER_OF_TESTQUERIES; runs++) { QueryShardContext context = createShardContext(); From 609ccaad07cbb2a2629c8e051a24e98538bba863 Mon Sep 17 00:00:00 2001 From: Dimitris Athanasiou Date: Thu, 27 Sep 2018 15:33:22 +0300 Subject: [PATCH 21/25] [ML][HLRC] Replace REST-based ML test cleanup with the ML client (#34109) Now that all basic APIs for managing jobs and datafeeds have been implemented we replace the duplicated `MlRestTestStateCleaner` with an implementation that uses the HLRC Machine Learning client itself. 
--- .../client/MachineLearningGetResultsIT.java | 2 +- .../client/MachineLearningIT.java | 3 +- .../client/MlRestTestStateCleaner.java | 109 ------------------ .../client/MlTestStateCleaner.java | 102 ++++++++++++++++ .../MlClientDocumentationIT.java | 4 +- 5 files changed, 106 insertions(+), 114 deletions(-) delete mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/MlRestTestStateCleaner.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/MlTestStateCleaner.java diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java index ddaec64157381..751f4cfdf0efe 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java @@ -147,7 +147,7 @@ private void addCategoriesIndexRequests(BulkRequest bulkRequest) { @After public void deleteJob() throws IOException { - new MlRestTestStateCleaner(logger, client()).clearMlMetadata(); + new MlTestStateCleaner(logger, highLevelClient().machineLearning()).clearMlMetadata(); } public void testGetCategories() throws IOException { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java index 11fbb5561d9fe..a8050397ad109 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java @@ -19,7 +19,6 @@ package org.elasticsearch.client; import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator; - import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import 
org.elasticsearch.action.bulk.BulkRequest; @@ -93,7 +92,7 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase { @After public void cleanUp() throws IOException { - new MlRestTestStateCleaner(logger, client()).clearMlMetadata(); + new MlTestStateCleaner(logger, highLevelClient().machineLearning()).clearMlMetadata(); } public void testPutJob() throws Exception { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MlRestTestStateCleaner.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MlRestTestStateCleaner.java deleted file mode 100644 index 7ad86576245ef..0000000000000 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MlRestTestStateCleaner.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.client; - -import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.test.rest.ESRestTestCase; - -import java.io.IOException; -import java.util.List; -import java.util.Map; - -/** - * This is temporarily duplicated from the server side. 
- * @TODO Replace with an implementation using the HLRC once - * the APIs for managing datafeeds are implemented. - */ -public class MlRestTestStateCleaner { - - private final Logger logger; - private final RestClient adminClient; - - public MlRestTestStateCleaner(Logger logger, RestClient adminClient) { - this.logger = logger; - this.adminClient = adminClient; - } - - public void clearMlMetadata() throws IOException { - deleteAllDatafeeds(); - deleteAllJobs(); - // indices will be deleted by the ESRestTestCase class - } - - @SuppressWarnings("unchecked") - private void deleteAllDatafeeds() throws IOException { - final Request datafeedsRequest = new Request("GET", "/_xpack/ml/datafeeds"); - datafeedsRequest.addParameter("filter_path", "datafeeds"); - final Response datafeedsResponse = adminClient.performRequest(datafeedsRequest); - final List> datafeeds = - (List>) XContentMapValues.extractValue("datafeeds", ESRestTestCase.entityAsMap(datafeedsResponse)); - if (datafeeds == null) { - return; - } - - try { - adminClient.performRequest(new Request("POST", "/_xpack/ml/datafeeds/_all/_stop")); - } catch (Exception e1) { - logger.warn("failed to stop all datafeeds. 
Forcing stop", e1); - try { - adminClient.performRequest(new Request("POST", "/_xpack/ml/datafeeds/_all/_stop?force=true")); - } catch (Exception e2) { - logger.warn("Force-closing all data feeds failed", e2); - } - throw new RuntimeException( - "Had to resort to force-stopping datafeeds, something went wrong?", e1); - } - - for (Map datafeed : datafeeds) { - String datafeedId = (String) datafeed.get("datafeed_id"); - adminClient.performRequest(new Request("DELETE", "/_xpack/ml/datafeeds/" + datafeedId)); - } - } - - private void deleteAllJobs() throws IOException { - final Request jobsRequest = new Request("GET", "/_xpack/ml/anomaly_detectors"); - jobsRequest.addParameter("filter_path", "jobs"); - final Response response = adminClient.performRequest(jobsRequest); - @SuppressWarnings("unchecked") - final List> jobConfigs = - (List>) XContentMapValues.extractValue("jobs", ESRestTestCase.entityAsMap(response)); - if (jobConfigs == null) { - return; - } - - try { - adminClient.performRequest(new Request("POST", "/_xpack/ml/anomaly_detectors/_all/_close")); - } catch (Exception e1) { - logger.warn("failed to close all jobs. 
Forcing closed", e1); - try { - adminClient.performRequest(new Request("POST", "/_xpack/ml/anomaly_detectors/_all/_close?force=true")); - } catch (Exception e2) { - logger.warn("Force-closing all jobs failed", e2); - } - throw new RuntimeException("Had to resort to force-closing jobs, something went wrong?", - e1); - } - - for (Map jobConfig : jobConfigs) { - String jobId = (String) jobConfig.get("job_id"); - adminClient.performRequest(new Request("DELETE", "/_xpack/ml/anomaly_detectors/" + jobId)); - } - } -} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MlTestStateCleaner.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MlTestStateCleaner.java new file mode 100644 index 0000000000000..c565af7c37202 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MlTestStateCleaner.java @@ -0,0 +1,102 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client; + +import org.apache.logging.log4j.Logger; +import org.elasticsearch.client.ml.CloseJobRequest; +import org.elasticsearch.client.ml.DeleteDatafeedRequest; +import org.elasticsearch.client.ml.DeleteJobRequest; +import org.elasticsearch.client.ml.GetDatafeedRequest; +import org.elasticsearch.client.ml.GetDatafeedResponse; +import org.elasticsearch.client.ml.GetJobRequest; +import org.elasticsearch.client.ml.GetJobResponse; +import org.elasticsearch.client.ml.StopDatafeedRequest; +import org.elasticsearch.client.ml.datafeed.DatafeedConfig; +import org.elasticsearch.client.ml.job.config.Job; + +import java.io.IOException; + +/** + * Cleans up and ML resources created during tests + */ +public class MlTestStateCleaner { + + private final Logger logger; + private final MachineLearningClient mlClient; + + public MlTestStateCleaner(Logger logger, MachineLearningClient mlClient) { + this.logger = logger; + this.mlClient = mlClient; + } + + public void clearMlMetadata() throws IOException { + deleteAllDatafeeds(); + deleteAllJobs(); + } + + private void deleteAllDatafeeds() throws IOException { + stopAllDatafeeds(); + + GetDatafeedResponse getDatafeedResponse = mlClient.getDatafeed(GetDatafeedRequest.getAllDatafeedsRequest(), RequestOptions.DEFAULT); + for (DatafeedConfig datafeed : getDatafeedResponse.datafeeds()) { + mlClient.deleteDatafeed(new DeleteDatafeedRequest(datafeed.getId()), RequestOptions.DEFAULT); + } + } + + private void stopAllDatafeeds() { + StopDatafeedRequest stopAllDatafeedsRequest = StopDatafeedRequest.stopAllDatafeedsRequest(); + try { + mlClient.stopDatafeed(stopAllDatafeedsRequest, RequestOptions.DEFAULT); + } catch (Exception e1) { + logger.warn("failed to stop all datafeeds. 
Forcing stop", e1); + try { + stopAllDatafeedsRequest.setForce(true); + mlClient.stopDatafeed(stopAllDatafeedsRequest, RequestOptions.DEFAULT); + } catch (Exception e2) { + logger.warn("Force-closing all data feeds failed", e2); + } + throw new RuntimeException("Had to resort to force-stopping datafeeds, something went wrong?", e1); + } + } + + private void deleteAllJobs() throws IOException { + closeAllJobs(); + + GetJobResponse getJobResponse = mlClient.getJob(GetJobRequest.getAllJobsRequest(), RequestOptions.DEFAULT); + for (Job job : getJobResponse.jobs()) { + mlClient.deleteJob(new DeleteJobRequest(job.getId()), RequestOptions.DEFAULT); + } + } + + private void closeAllJobs() { + CloseJobRequest closeAllJobsRequest = CloseJobRequest.closeAllJobsRequest(); + try { + mlClient.closeJob(closeAllJobsRequest, RequestOptions.DEFAULT); + } catch (Exception e1) { + logger.warn("failed to close all jobs. Forcing closed", e1); + closeAllJobsRequest.setForce(true); + try { + mlClient.closeJob(closeAllJobsRequest, RequestOptions.DEFAULT); + } catch (Exception e2) { + logger.warn("Force-closing all jobs failed", e2); + } + throw new RuntimeException("Had to resort to force-closing jobs, something went wrong?", e1); + } + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java index 1613b34693e14..a9fbb56f68fa2 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java @@ -30,7 +30,7 @@ import org.elasticsearch.client.ESRestHighLevelClientTestCase; import org.elasticsearch.client.MachineLearningGetResultsIT; import org.elasticsearch.client.MachineLearningIT; -import org.elasticsearch.client.MlRestTestStateCleaner; +import 
org.elasticsearch.client.MlTestStateCleaner; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.ml.CloseJobRequest; @@ -126,7 +126,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase { @After public void cleanUp() throws IOException { - new MlRestTestStateCleaner(logger, client()).clearMlMetadata(); + new MlTestStateCleaner(logger, highLevelClient().machineLearning()).clearMlMetadata(); } public void testCreateJob() throws Exception { From 899a7c7d99b9b3e1f7dece16a614bccaf780bebe Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Thu, 27 Sep 2018 09:47:51 -0400 Subject: [PATCH 22/25] Fix remote cluster seeds fallback (#34090) Recently we introduced the settings cluster.remote to take the place of search.remote for configuring remote cluster connections. We made this change due to the fact that we have generalized the remote cluster infrastructure to also be used within cross-cluster replication and not only cross-cluster search. For backwards compatibility, when we made this change, we allowed that cluster.remote would fallback to search.remote. Alas, the initial change for this contained a bug for handling the proxy and seeds settings. The bug for the seeds settings arose because we were manually iterating over the concrete settings only for cluster.remote seeds but not for search.remote seeds. This commit addresses this by iterating over both cluster.remote seeds and search.remote seeds. Additionally, when checking for existence of proxy settings, we have to not only check cluster.remote proxy settings, but also fallback to search.remote proxy settings. This commit addresses both issues, and adds tests for these situations. 
--- .../transport/RemoteClusterAware.java | 36 +++++++++-- .../transport/RemoteClusterServiceTests.java | 62 +++++++++++++++---- 2 files changed, 83 insertions(+), 15 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java index ed4875238c0bc..a1776d04d340d 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java @@ -36,12 +36,16 @@ import java.net.InetSocketAddress; import java.net.UnknownHostException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.List; +import java.util.Locale; import java.util.Map; +import java.util.NavigableSet; import java.util.Set; +import java.util.TreeSet; import java.util.function.Predicate; import java.util.function.Supplier; import java.util.stream.Collectors; @@ -181,12 +185,36 @@ protected RemoteClusterAware(Settings settings) { * {@link TransportAddress#META_ADDRESS} and their configured address will be used as the hostname for the generated discovery node. 
*/ protected static Map>>> buildRemoteClustersDynamicConfig(Settings settings) { - Stream>> allConcreteSettings = REMOTE_CLUSTERS_SEEDS.getAllConcreteSettings(settings); + final Map>>> remoteSeeds = + buildRemoteClustersDynamicConfig(settings, REMOTE_CLUSTERS_SEEDS); + final Map>>> searchRemoteSeeds = + buildRemoteClustersDynamicConfig(settings, SEARCH_REMOTE_CLUSTERS_SEEDS); + // sort the intersection for predictable output order + final NavigableSet intersection = + new TreeSet<>(Arrays.asList( + searchRemoteSeeds.keySet().stream().filter(s -> remoteSeeds.keySet().contains(s)).sorted().toArray(String[]::new))); + if (intersection.isEmpty() == false) { + final String message = String.format( + Locale.ROOT, + "found duplicate remote cluster configurations for cluster alias%s [%s]", + intersection.size() == 1 ? "" : "es", + String.join(",", intersection)); + throw new IllegalArgumentException(message); + } + return Stream + .concat(remoteSeeds.entrySet().stream(), searchRemoteSeeds.entrySet().stream()) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + + private static Map>>> buildRemoteClustersDynamicConfig( + final Settings settings, final Setting.AffixSetting> seedsSetting) { + final Stream>> allConcreteSettings = seedsSetting.getAllConcreteSettings(settings); return allConcreteSettings.collect( - Collectors.toMap(REMOTE_CLUSTERS_SEEDS::getNamespace, concreteSetting -> { - String clusterName = REMOTE_CLUSTERS_SEEDS.getNamespace(concreteSetting); + Collectors.toMap(seedsSetting::getNamespace, concreteSetting -> { + String clusterName = seedsSetting.getNamespace(concreteSetting); List addresses = concreteSetting.get(settings); - final boolean proxyMode = REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace(clusterName).exists(settings); + final boolean proxyMode = + REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace(clusterName).existsOrFallbackExists(settings); List> nodes = new ArrayList<>(addresses.size()); for (String address : 
addresses) { nodes.add(() -> buildSeedNode(clusterName, address, proxyMode)); diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java index 6e92e70e4aed1..94ac7e963c1da 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java @@ -62,7 +62,10 @@ import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.hasToString; import static org.hamcrest.Matchers.startsWith; public class RemoteClusterServiceTests extends ESTestCase { @@ -120,17 +123,19 @@ public void testRemoteClusterSeedSetting() { public void testBuildRemoteClustersDynamicConfig() throws Exception { Map>>> map = RemoteClusterService.buildRemoteClustersDynamicConfig( - Settings.builder().put("cluster.remote.foo.seeds", "192.168.0.1:8080") - .put("cluster.remote.bar.seeds", "[::1]:9090") - .put("cluster.remote.boom.seeds", "boom-node1.internal:1000") - .put("cluster.remote.boom.proxy", "foo.bar.com:1234").build()); - assertEquals(3, map.size()); - assertTrue(map.containsKey("foo")); - assertTrue(map.containsKey("bar")); - assertTrue(map.containsKey("boom")); - assertEquals(1, map.get("foo").v2().size()); - assertEquals(1, map.get("bar").v2().size()); - assertEquals(1, map.get("boom").v2().size()); + Settings.builder() + .put("cluster.remote.foo.seeds", "192.168.0.1:8080") + .put("cluster.remote.bar.seeds", "[::1]:9090") + .put("cluster.remote.boom.seeds", "boom-node1.internal:1000") + .put("cluster.remote.boom.proxy", "foo.bar.com:1234") + .put("search.remote.quux.seeds", "quux:9300") + 
.put("search.remote.quux.proxy", "quux-proxy:19300") + .build()); + assertThat(map.keySet(), containsInAnyOrder(equalTo("foo"), equalTo("bar"), equalTo("boom"), equalTo("quux"))); + assertThat(map.get("foo").v2(), hasSize(1)); + assertThat(map.get("bar").v2(), hasSize(1)); + assertThat(map.get("boom").v2(), hasSize(1)); + assertThat(map.get("quux").v2(), hasSize(1)); DiscoveryNode foo = map.get("foo").v2().get(0).get(); assertEquals("", map.get("foo").v1()); @@ -150,6 +155,41 @@ public void testBuildRemoteClustersDynamicConfig() throws Exception { assertEquals(boom.getId(), "boom#boom-node1.internal:1000"); assertEquals("foo.bar.com:1234", map.get("boom").v1()); assertEquals(boom.getVersion(), Version.CURRENT.minimumCompatibilityVersion()); + + DiscoveryNode quux = map.get("quux").v2().get(0).get(); + assertEquals(quux.getAddress(), new TransportAddress(TransportAddress.META_ADDRESS, 0)); + assertEquals("quux", quux.getHostName()); + assertEquals(quux.getId(), "quux#quux:9300"); + assertEquals("quux-proxy:19300", map.get("quux").v1()); + assertEquals(quux.getVersion(), Version.CURRENT.minimumCompatibilityVersion()); + + assertSettingDeprecationsAndWarnings(new String[]{"search.remote.quux.seeds", "search.remote.quux.proxy"}); + } + + public void testBuildRemoteClustersDynamicConfigWithDuplicate() { + final IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> RemoteClusterService.buildRemoteClustersDynamicConfig( + Settings.builder() + .put("cluster.remote.foo.seeds", "192.168.0.1:8080") + .put("search.remote.foo.seeds", "192.168.0.1:8080") + .build())); + assertThat(e, hasToString(containsString("found duplicate remote cluster configurations for cluster alias [foo]"))); + assertSettingDeprecationsAndWarnings(new String[]{"search.remote.foo.seeds"}); + } + + public void testBuildRemoteClustersDynamicConfigWithDuplicates() { + final IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> 
RemoteClusterService.buildRemoteClustersDynamicConfig( + Settings.builder() + .put("cluster.remote.foo.seeds", "192.168.0.1:8080") + .put("search.remote.foo.seeds", "192.168.0.1:8080") + .put("cluster.remote.bar.seeds", "192.168.0.1:8080") + .put("search.remote.bar.seeds", "192.168.0.1:8080") + .build())); + assertThat(e, hasToString(containsString("found duplicate remote cluster configurations for cluster aliases [bar,foo]"))); + assertSettingDeprecationsAndWarnings(new String[]{"search.remote.bar.seeds", "search.remote.foo.seeds"}); } public void testGroupClusterIndices() throws IOException { From 5f19146bace768255363c748fdcaa2c94766f299 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 27 Sep 2018 16:27:49 +0200 Subject: [PATCH 23/25] [SQL] Clean up LogicalPlanBuilder#doJoin (#34048) Currently the local `type` and `condition` variables are unused and can be removed. This code can be added later again if joins are supported. --- .../xpack/sql/parser/LogicalPlanBuilder.java | 27 ++----------------- 1 file changed, 2 insertions(+), 25 deletions(-) diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java index 7d41e6e677dd4..23d2c20d3059a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java @@ -18,7 +18,6 @@ import org.elasticsearch.xpack.sql.parser.SqlBaseParser.GroupByContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.JoinCriteriaContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.JoinRelationContext; -import org.elasticsearch.xpack.sql.parser.SqlBaseParser.JoinTypeContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.LimitClauseContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.NamedQueryContext; 
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.QueryContext; @@ -33,7 +32,6 @@ import org.elasticsearch.xpack.sql.plan.logical.Distinct; import org.elasticsearch.xpack.sql.plan.logical.Filter; import org.elasticsearch.xpack.sql.plan.logical.Join; -import org.elasticsearch.xpack.sql.plan.logical.Join.JoinType; import org.elasticsearch.xpack.sql.plan.logical.Limit; import org.elasticsearch.xpack.sql.plan.logical.LocalRelation; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; @@ -168,41 +166,20 @@ public LogicalPlan visitRelation(RelationContext ctx) { LogicalPlan result = plan(ctx.relationPrimary()); for (JoinRelationContext j : ctx.joinRelation()) { - result = doJoin(result, j); + result = doJoin(j); } return result; } - private Join doJoin(LogicalPlan left, JoinRelationContext ctx) { - JoinTypeContext joinType = ctx.joinType(); + private Join doJoin(JoinRelationContext ctx) { - @SuppressWarnings("unused") - Join.JoinType type = JoinType.INNER; - if (joinType != null) { - if (joinType.FULL() != null) { - type = JoinType.FULL; - } - if (joinType.LEFT() != null) { - type = JoinType.LEFT; - } - if (joinType.RIGHT() != null) { - type = JoinType.RIGHT; - } - } - - @SuppressWarnings("unused") - Expression condition = null; JoinCriteriaContext criteria = ctx.joinCriteria(); if (criteria != null) { if (criteria.USING() != null) { throw new UnsupportedOperationException(); } - if (criteria.booleanExpression() != null) { - condition = expression(criteria.booleanExpression()); - } } - // We would return this if we actually supported JOINs, but we don't yet. 
// new Join(source(ctx), left, plan(ctx.right), type, condition); throw new ParsingException(source(ctx), "Queries with JOIN are not yet supported"); From 949e4e9d1a4b37ded617c78a493039009b9e2879 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Thu, 27 Sep 2018 08:36:18 -0700 Subject: [PATCH 24/25] [DOCS] Synchronizes capitalization in top-level titles (#33605) --- docs/reference/api-conventions.asciidoc | 2 +- docs/reference/getting-started.asciidoc | 2 +- docs/reference/index-modules.asciidoc | 2 +- docs/reference/ingest.asciidoc | 2 +- docs/reference/release-notes.asciidoc | 2 +- docs/reference/release-notes/7.0.0-alpha1.asciidoc | 2 +- docs/reference/release-notes/highlights.asciidoc | 4 ++-- docs/reference/sql/appendix/syntax-reserved.asciidoc | 2 +- docs/reference/sql/index.asciidoc | 2 +- x-pack/docs/en/watcher/index.asciidoc | 2 +- 10 files changed, 11 insertions(+), 11 deletions(-) diff --git a/docs/reference/api-conventions.asciidoc b/docs/reference/api-conventions.asciidoc index 42216a9a0fc14..be41c0fdc77e5 100644 --- a/docs/reference/api-conventions.asciidoc +++ b/docs/reference/api-conventions.asciidoc @@ -1,5 +1,5 @@ [[api-conventions]] -= API Conventions += API conventions [partintro] -- diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc index c7737bbfd9b40..2763a31fb4eac 100755 --- a/docs/reference/getting-started.asciidoc +++ b/docs/reference/getting-started.asciidoc @@ -1,5 +1,5 @@ [[getting-started]] -= Getting Started += Getting started [partintro] -- diff --git a/docs/reference/index-modules.asciidoc b/docs/reference/index-modules.asciidoc index 70c3d09dc930b..81bc96bb8f92e 100644 --- a/docs/reference/index-modules.asciidoc +++ b/docs/reference/index-modules.asciidoc @@ -1,6 +1,6 @@ [[index-modules]] -= Index Modules += Index modules [partintro] -- diff --git a/docs/reference/ingest.asciidoc b/docs/reference/ingest.asciidoc index 18349beab6ab1..772013534b63b 100644 --- a/docs/reference/ingest.asciidoc
+++ b/docs/reference/ingest.asciidoc @@ -1,5 +1,5 @@ [[ingest]] -= Ingest Node += Ingest node [partintro] -- diff --git a/docs/reference/release-notes.asciidoc b/docs/reference/release-notes.asciidoc index 490249461e5d1..ffea569ca21f3 100644 --- a/docs/reference/release-notes.asciidoc +++ b/docs/reference/release-notes.asciidoc @@ -1,5 +1,5 @@ [[es-release-notes]] -= Release Notes += Release notes [partintro] -- diff --git a/docs/reference/release-notes/7.0.0-alpha1.asciidoc b/docs/reference/release-notes/7.0.0-alpha1.asciidoc index c3a03d77f8118..eb1924d2452c2 100644 --- a/docs/reference/release-notes/7.0.0-alpha1.asciidoc +++ b/docs/reference/release-notes/7.0.0-alpha1.asciidoc @@ -1,5 +1,5 @@ [[release-notes-7.0.0-alpha1]] -== 7.0.0-alpha1 Release Notes +== 7.0.0-alpha1 release notes The changes listed below have been released for the first time in Elasticsearch 7.0.0-alpha1. diff --git a/docs/reference/release-notes/highlights.asciidoc b/docs/reference/release-notes/highlights.asciidoc index 0ab4106c22c1f..5b20b67061d03 100644 --- a/docs/reference/release-notes/highlights.asciidoc +++ b/docs/reference/release-notes/highlights.asciidoc @@ -1,7 +1,7 @@ [[release-highlights]] -= {es} Release Highlights += {es} Release highlights ++++ -Release Highlights +Release highlights ++++ [partintro] diff --git a/docs/reference/sql/appendix/syntax-reserved.asciidoc b/docs/reference/sql/appendix/syntax-reserved.asciidoc index 7a502d6eea939..9d37b181cdfd1 100644 --- a/docs/reference/sql/appendix/syntax-reserved.asciidoc +++ b/docs/reference/sql/appendix/syntax-reserved.asciidoc @@ -2,7 +2,7 @@ [testenv="basic"] [appendix] [[sql-syntax-reserved]] -= Reserved Keywords += Reserved keywords Table with reserved keywords that need to be quoted. Also provide an example to make it more obvious. 
diff --git a/docs/reference/sql/index.asciidoc b/docs/reference/sql/index.asciidoc index aa9eebea7b7f6..d702245273028 100644 --- a/docs/reference/sql/index.asciidoc +++ b/docs/reference/sql/index.asciidoc @@ -1,7 +1,7 @@ [role="xpack"] [testenv="basic"] [[xpack-sql]] -= SQL Access += SQL access :sql-tests: {xes-repo-dir}/../../qa/sql :sql-specs: {sql-tests}/src/main/resources diff --git a/x-pack/docs/en/watcher/index.asciidoc b/x-pack/docs/en/watcher/index.asciidoc index 2be3638971929..5f51c948ebf3a 100644 --- a/x-pack/docs/en/watcher/index.asciidoc +++ b/x-pack/docs/en/watcher/index.asciidoc @@ -1,5 +1,5 @@ [[xpack-alerting]] -= Alerting on Cluster and Index Events += Alerting on cluster and index events [partintro] -- From 37be3e713cb8bae4c6dffa5405da19ea0569fbe9 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Thu, 27 Sep 2018 08:41:38 -0700 Subject: [PATCH 25/25] [DOCS] Synchronize location of Breaking Changes (#33588) --- docs/reference/index.asciidoc | 4 +- docs/reference/migration/migrate_7_0.asciidoc | 38 +++++++++---------- .../migrate_7_0/aggregations.asciidoc | 5 +++ .../migration/migrate_7_0/analysis.asciidoc | 5 +++ .../migration/migrate_7_0/api.asciidoc | 9 +++++ .../migration/migrate_7_0/cluster.asciidoc | 4 ++ .../migration/migrate_7_0/indices.asciidoc | 15 ++++++-- .../migration/migrate_7_0/java.asciidoc | 5 +++ .../migrate_7_0/low_level_restclient.asciidoc | 3 ++ .../migration/migrate_7_0/mappings.asciidoc | 9 +++++ .../migration/migrate_7_0/packaging.asciidoc | 3 ++ .../migration/migrate_7_0/plugins.asciidoc | 5 +++ .../migration/migrate_7_0/restclient.asciidoc | 3 ++ .../migration/migrate_7_0/scripting.asciidoc | 3 ++ .../migration/migrate_7_0/search.asciidoc | 15 +++++++- .../migration/migrate_7_0/settings.asciidoc | 9 ++++- .../migrate_7_0/snapshotstats.asciidoc | 2 + 17 files changed, 110 insertions(+), 27 deletions(-) diff --git a/docs/reference/index.asciidoc b/docs/reference/index.asciidoc index e4debd30c03ad..216983bc6f01d 100644 --- 
a/docs/reference/index.asciidoc +++ b/docs/reference/index.asciidoc @@ -29,8 +29,6 @@ include::setup/bootstrap-checks-xes.asciidoc[] :edit_url: include::upgrade.asciidoc[] -include::migration/index.asciidoc[] - include::api-conventions.asciidoc[] include::docs.asciidoc[] @@ -76,6 +74,8 @@ include::glossary.asciidoc[] include::release-notes/highlights.asciidoc[] +include::migration/index.asciidoc[] + include::release-notes.asciidoc[] include::redirects.asciidoc[] diff --git a/docs/reference/migration/migrate_7_0.asciidoc b/docs/reference/migration/migrate_7_0.asciidoc index 924a6984dc044..45f383435e4bc 100644 --- a/docs/reference/migration/migrate_7_0.asciidoc +++ b/docs/reference/migration/migrate_7_0.asciidoc @@ -1,30 +1,14 @@ [[breaking-changes-7.0]] == Breaking changes in 7.0 +++++ +7.0 +++++ This section discusses the changes that you need to be aware of when migrating your application to Elasticsearch 7.0. See also <> and <>. -[float] -=== Indices created before 7.0 - -Elasticsearch 7.0 can read indices created in version 6.0 or above. An -Elasticsearch 7.0 node will not start in the presence of indices created in a -version of Elasticsearch before 6.0. - -[IMPORTANT] -.Reindex indices from Elasticsearch 5.x or before -========================================= - -Indices created in Elasticsearch 5.x or before will need to be reindexed with -Elasticsearch 6.x in order to be readable by Elasticsearch 7.x. - -========================================= - -[float] -=== Also see: - * <> * <> * <> @@ -41,6 +25,22 @@ Elasticsearch 6.x in order to be readable by Elasticsearch 7.x. * <> * <> +[float] +=== Indices created before 7.0 + +Elasticsearch 7.0 can read indices created in version 6.0 or above. An +Elasticsearch 7.0 node will not start in the presence of indices created in a +version of Elasticsearch before 6.0. 
+ +[IMPORTANT] +.Reindex indices from Elasticsearch 5.x or before +========================================= + +Indices created in Elasticsearch 5.x or before will need to be reindexed with +Elasticsearch 6.x in order to be readable by Elasticsearch 7.x. + +========================================= + include::migrate_7_0/aggregations.asciidoc[] include::migrate_7_0/analysis.asciidoc[] include::migrate_7_0/cluster.asciidoc[] diff --git a/docs/reference/migration/migrate_7_0/aggregations.asciidoc b/docs/reference/migration/migrate_7_0/aggregations.asciidoc index 08f181b2919e2..b29f741dd8579 100644 --- a/docs/reference/migration/migrate_7_0/aggregations.asciidoc +++ b/docs/reference/migration/migrate_7_0/aggregations.asciidoc @@ -1,21 +1,26 @@ +[float] [[breaking_70_aggregations_changes]] === Aggregations changes +[float] ==== Deprecated `global_ordinals_hash` and `global_ordinals_low_cardinality` execution hints for terms aggregations have been removed These `execution_hint` are removed and should be replaced by `global_ordinals`. +[float] ==== `search.max_buckets` in the cluster setting The dynamic cluster setting named `search.max_buckets` now defaults to 10,000 (instead of unlimited in the previous version). Requests that try to return more than the limit will fail with an exception. +[float] ==== `missing` option of the `composite` aggregation has been removed The `missing` option of the `composite` aggregation, deprecated in 6.x, has been removed. `missing_bucket` should be used instead. 
+[float] ==== Replaced `params._agg` with `state` context variable in scripted metric aggregations The object used to share aggregation state between the scripts in a Scripted Metric diff --git a/docs/reference/migration/migrate_7_0/analysis.asciidoc b/docs/reference/migration/migrate_7_0/analysis.asciidoc index 6e6cc5b078d61..e4b27def9f8cf 100644 --- a/docs/reference/migration/migrate_7_0/analysis.asciidoc +++ b/docs/reference/migration/migrate_7_0/analysis.asciidoc @@ -1,12 +1,15 @@ +[float] [[breaking_70_analysis_changes]] === Analysis changes +[float] ==== Limiting the number of tokens produced by _analyze To safeguard against out of memory errors, the number of tokens that can be produced using the `_analyze` endpoint has been limited to 10000. This default limit can be changed for a particular index with the index setting `index.analyze.max_token_count`. +[float] ==== Limiting the length of an analyzed text during highlighting Highlighting a text that was indexed without offsets or term vectors, @@ -16,6 +19,7 @@ To protect against this, the maximum number of characters that will be analyzed limited to 1000000. This default limit can be changed for a particular index with the index setting `index.highlight.max_analyzed_offset`. +[float] ==== `delimited_payload_filter` renaming The `delimited_payload_filter` was deprecated and renamed to `delimited_payload` in 6.2. @@ -23,6 +27,7 @@ Using it in indices created before 7.0 will issue deprecation warnings. Using th name in new indices created in 7.0 will throw an error. Use the new name `delimited_payload` instead. +[float] ==== `standard` filter has been removed The `standard` token filter has been removed because it doesn't change anything in the stream. 
diff --git a/docs/reference/migration/migrate_7_0/api.asciidoc b/docs/reference/migration/migrate_7_0/api.asciidoc index a58223023bd10..71a8e1aa0150c 100644 --- a/docs/reference/migration/migrate_7_0/api.asciidoc +++ b/docs/reference/migration/migrate_7_0/api.asciidoc @@ -1,6 +1,8 @@ +[float] [[breaking_70_api_changes]] === API changes +[float] ==== Camel case and underscore parameters deprecated in 6.x have been removed A number of duplicate parameters deprecated in 6.x have been removed from Bulk request, Multi Get request, Term Vectors request, and More Like This Query @@ -22,6 +24,7 @@ The following parameters starting with underscore have been removed: Instead of these removed parameters, use their non camel case equivalents without starting underscore, e.g. use `version_type` instead of `_version_type` or `versionType`. +[float] ==== Thread pool info In previous versions of Elasticsearch, the thread pool info returned in the @@ -48,10 +51,12 @@ aligns the output of the API with the configuration values for thread pools. Note that `core` and `max` will be populated for scaling thread pools, and `size` will be populated for fixed thread pools. +[float] ==== The parameter `fields` deprecated in 6.x has been removed from Bulk request and Update request. The Update API returns `400 - Bad request` if request contains unknown parameters (instead of ignored in the previous version). +[float] [[remove-suggest-metric]] ==== Remove support for `suggest` metric/index metric in indices stats and nodes stats APIs @@ -66,6 +71,7 @@ In the past, `fields` could be provided either as a parameter, or as part of the body. Specifying `fields` in the request body as opposed to a parameter was deprecated in 6.4.0, and is now unsupported in 7.0.0. +[float] ==== `copy_settings` is deprecated on shrink and split APIs Versions of Elasticsearch prior to 6.4.0 did not copy index settings on shrink @@ -76,10 +82,12 @@ will be for such settings to be copied on such operations. 
To enable users in the only behavior in 8.0.0, this parameter is deprecated in 7.0.0 for removal in 8.0.0. +[float] ==== The deprecated stored script contexts have now been removed When putting stored scripts, support for storing them with the deprecated `template` context or without a context is now removed. Scripts must be stored using the `script` context as mentioned in the documentation. +[float] ==== Get Aliases API limitations when {security} is enabled removed The behavior and response codes of the get aliases API no longer vary @@ -88,6 +96,7 @@ depending on whether {security} is enabled. Previously a current user was not authorized for any alias. An empty response with status 200 - OK is now returned instead at all times. +[float] ==== Put User API response no longer has `user` object The Put User API response was changed in 6.5.0 to add the `created` field diff --git a/docs/reference/migration/migrate_7_0/cluster.asciidoc b/docs/reference/migration/migrate_7_0/cluster.asciidoc index e9584074d73d2..d518d29987d3b 100644 --- a/docs/reference/migration/migrate_7_0/cluster.asciidoc +++ b/docs/reference/migration/migrate_7_0/cluster.asciidoc @@ -1,16 +1,20 @@ +[float] [[breaking_70_cluster_changes]] === Cluster changes +[float] ==== `:` is no longer allowed in cluster name Due to cross-cluster search using `:` to separate a cluster and index name, cluster names may no longer contain `:`. +[float] ==== New default for `wait_for_active_shards` parameter of the open index command The default value for the `wait_for_active_shards` parameter of the open index API is changed from 0 to 1, which means that the command will now by default wait for all primary shards of the opened index to be allocated. +[float] ==== Shard preferences `_primary`, `_primary_first`, `_replica`, and `_replica_first` are removed These shard preferences are removed in favour of the `_prefer_nodes` and `_only_nodes` preferences. 
diff --git a/docs/reference/migration/migrate_7_0/indices.asciidoc b/docs/reference/migration/migrate_7_0/indices.asciidoc index a47cc6f4324a2..d040343d32eb4 100644 --- a/docs/reference/migration/migrate_7_0/indices.asciidoc +++ b/docs/reference/migration/migrate_7_0/indices.asciidoc @@ -1,17 +1,20 @@ +[float] [[breaking_70_indices_changes]] === Indices changes +[float] ==== `:` is no longer allowed in index name Due to cross-cluster search using `:` to separate a cluster and index name, index names may no longer contain `:`. +[float] ==== `index.unassigned.node_left.delayed_timeout` may no longer be negative Negative values were interpreted as zero in earlier versions but are no longer accepted. - +[float] ==== `_flush` and `_force_merge` will no longer refresh In previous versions issuing a `_flush` or `_force_merge` (with `flush=true`) @@ -20,7 +23,7 @@ visible to searches and non-realtime GET operations. From now on these operation don't have this side-effect anymore. To make documents visible an explicit `_refresh` call is needed unless the index is refreshed by the internal scheduler. - +[float] ==== Limit to the difference between max_size and min_size in NGramTokenFilter and NGramTokenizer To safeguard against creating too many index terms, the difference between `max_ngram` and @@ -29,7 +32,7 @@ limit can be changed with the index setting `index.max_ngram_diff`. Note that if exceeded a error is thrown only for new indices. For existing pre-7.0 indices, a deprecation warning is logged. - +[float] ==== Limit to the difference between max_size and min_size in ShingleTokenFilter To safeguard against creating too many tokens, the difference between `max_shingle_size` and @@ -38,6 +41,7 @@ limit can be changed with the index setting `index.max_shingle_diff`. Note that exceeded a error is thrown only for new indices. For existing pre-7.0 indices, a deprecation warning is logged. 
+[float] ==== Document distribution changes Indices created with version `7.0.0` onwards will have an automatic `index.number_of_routing_shards` @@ -46,6 +50,7 @@ shards the index has. In order to maintain the exact same distribution as a pre `index.number_of_routing_shards` must be set to the `index.number_of_shards` at index creation time. Note: if the number of routing shards equals the number of shards `_split` operations are not supported. +[float] ==== Skipped background refresh on search idle shards Shards belonging to an index that does not have an explicit @@ -56,6 +61,7 @@ that access a search idle shard will be "parked" until the next refresh happens. Indexing requests with `wait_for_refresh` will also trigger a background refresh. +[float] ==== Remove deprecated url parameters for Clear Indices Cache API The following previously deprecated url parameter have been removed: @@ -65,12 +71,14 @@ The following previously deprecated url parameter have been removed: * `request_cache` - use `request` instead * `field_data` - use `fielddata` instead +[float] ==== `network.breaker.inflight_requests.overhead` increased to 2 Previously the in flight requests circuit breaker considered only the raw byte representation. By bumping the value of `network.breaker.inflight_requests.overhead` from 1 to 2, this circuit breaker considers now also the memory overhead of representing the request as a structured object. +[float] ==== Parent circuit breaker changes The parent circuit breaker defines a new setting `indices.breaker.total.use_real_memory` which is @@ -79,6 +87,7 @@ heap memory instead of only considering the reserved memory by child circuit bre setting is `true`, the default parent breaker limit also changes from 70% to 95% of the JVM heap size. The previous behavior can be restored by setting `indices.breaker.total.use_real_memory` to `false`. 
+[float] ==== `fix` value for `index.shard.check_on_startup` is removed Deprecated option value `fix` for setting `index.shard.check_on_startup` is not supported. \ No newline at end of file diff --git a/docs/reference/migration/migrate_7_0/java.asciidoc b/docs/reference/migration/migrate_7_0/java.asciidoc index dde61259a21d3..7d68ff2fb5737 100644 --- a/docs/reference/migration/migrate_7_0/java.asciidoc +++ b/docs/reference/migration/migrate_7_0/java.asciidoc @@ -1,23 +1,28 @@ +[float] [[breaking_70_java_changes]] === Java API changes +[float] ==== `isShardsAcked` deprecated in `6.2` has been removed `isShardsAcked` has been replaced by `isShardsAcknowledged` in `CreateIndexResponse`, `RolloverResponse` and `CreateIndexClusterStateUpdateResponse`. +[float] ==== `prepareExecute` removed from the client api The `prepareExecute` method which created a request builder has been removed from the client api. Instead, construct a builder for the appropriate request directly. +[float] ==== Some Aggregation classes have moved packages * All classes present in `org.elasticsearch.search.aggregations.metrics.*` packages were moved to a single `org.elasticsearch.search.aggregations.metrics` package. 
+[float] ==== `Retry.withBackoff` methods with `Settings` removed The variants of `Retry.withBackoff` that included `Settings` have been removed diff --git a/docs/reference/migration/migrate_7_0/low_level_restclient.asciidoc b/docs/reference/migration/migrate_7_0/low_level_restclient.asciidoc index 77f5266763ffe..0820c7f01cc70 100644 --- a/docs/reference/migration/migrate_7_0/low_level_restclient.asciidoc +++ b/docs/reference/migration/migrate_7_0/low_level_restclient.asciidoc @@ -1,6 +1,8 @@ +[float] [[breaking_70_low_level_restclient_changes]] === Low-level REST client changes +[float] ==== Deprecated flavors of performRequest have been removed We deprecated the flavors of `performRequest` and `performRequestAsync` that @@ -8,6 +10,7 @@ do not take `Request` objects in 6.4.0 in favor of the flavors that take `Request` objects because those methods can be extended without breaking backwards compatibility. +[float] ==== Removed setHosts We deprecated `setHosts` in 6.4.0 in favor of `setNodes` because it supports diff --git a/docs/reference/migration/migrate_7_0/mappings.asciidoc b/docs/reference/migration/migrate_7_0/mappings.asciidoc index 4983cb2da579a..5ee1615796c98 100644 --- a/docs/reference/migration/migrate_7_0/mappings.asciidoc +++ b/docs/reference/migration/migrate_7_0/mappings.asciidoc @@ -1,36 +1,44 @@ +[float] [[breaking_70_mappings_changes]] === Mapping changes +[float] ==== The `_all` meta field is removed The `_all` field deprecated in 6 have now been removed. +[float] ==== The `_uid` meta field is removed This field used to index a composite key formed of the `_type` and the `_id`. Now that indices cannot have multiple types, this has been removed in favour of `_id`. +[float] ==== The `_default_` mapping is no longer allowed The `_default_` mapping has been deprecated in 6.0 and is now no longer allowed in 7.0. Trying to configure a `_default_` mapping on 7.x indices will result in an error. 
+[float] ==== `index_options` for numeric fields has been removed The `index_options` field for numeric fields has been deprecated in 6 and has now been removed. +[float] ==== Limiting the number of `nested` json objects To safeguard against out of memory errors, the number of nested json objects within a single document across all fields has been limited to 10000. This default limit can be changed with the index setting `index.mapping.nested_objects.limit`. +[float] ==== The `update_all_types` option has been removed This option is useless now that all indices have at most one type. +[float] ==== The `classic` similarity has been removed The `classic` similarity relied on coordination factors for scoring to be good @@ -39,6 +47,7 @@ Lucene, which means that the `classic` similarity now produces scores of lower quality. It is advised to switch to `BM25` instead, which is widely accepted as a better alternative. +[float] ==== Similarities fail when unsupported options are provided An error will now be thrown when unknown configuration options are provided diff --git a/docs/reference/migration/migrate_7_0/packaging.asciidoc b/docs/reference/migration/migrate_7_0/packaging.asciidoc index 934522db7162f..e2380613d8f7b 100644 --- a/docs/reference/migration/migrate_7_0/packaging.asciidoc +++ b/docs/reference/migration/migrate_7_0/packaging.asciidoc @@ -1,6 +1,8 @@ +[float] [[breaking_70_packaging_changes]] === Packaging changes +[float] [[systemd-service-file-config]] ==== systemd service file is no longer configuration @@ -9,6 +11,7 @@ was previously marked as a configuration file in rpm and deb packages. Overrides to the systemd elasticsearch service should be made in `/etc/systemd/system/elasticsearch.service.d/override.conf`. 
+[float] ==== tar package no longer includes windows specific files The tar package previously included files in the `bin` directory meant only diff --git a/docs/reference/migration/migrate_7_0/plugins.asciidoc b/docs/reference/migration/migrate_7_0/plugins.asciidoc index 462823a61fd00..5fcd2bb95261c 100644 --- a/docs/reference/migration/migrate_7_0/plugins.asciidoc +++ b/docs/reference/migration/migrate_7_0/plugins.asciidoc @@ -1,6 +1,8 @@ +[float] [[breaking_70_plugins_changes]] === Plugins changes +[float] ==== Azure Repository plugin * The legacy azure settings which where starting with `cloud.azure.storage.` prefix have been removed. @@ -12,6 +14,7 @@ You must set it per azure client instead. Like `azure.client.default.timeout: 10 See {plugins}/repository-azure-repository-settings.html#repository-azure-repository-settings[Azure Repository settings]. +[float] ==== Google Cloud Storage Repository plugin * The repository settings `application_name`, `connect_timeout` and `read_timeout` have been removed and @@ -19,11 +22,13 @@ must now be specified in the client settings instead. See {plugins}/repository-gcs-client.html#repository-gcs-client[Google Cloud Storage Client Settings]. 
+[float] ==== Analysis Plugin changes * The misspelled helper method `requriesAnalysisSettings(AnalyzerProvider provider)` has been renamed to `requiresAnalysisSettings` +[float] ==== File-based discovery plugin * This plugin has been removed since its functionality is now part of diff --git a/docs/reference/migration/migrate_7_0/restclient.asciidoc b/docs/reference/migration/migrate_7_0/restclient.asciidoc index 470996cfeffec..3c0237db6e7b0 100644 --- a/docs/reference/migration/migrate_7_0/restclient.asciidoc +++ b/docs/reference/migration/migrate_7_0/restclient.asciidoc @@ -1,6 +1,8 @@ +[float] [[breaking_70_restclient_changes]] === High-level REST client changes +[float] ==== API methods accepting `Header` argument have been removed All API methods accepting headers as a `Header` varargs argument, deprecated @@ -12,6 +14,7 @@ In case you are specifying headers e.g. `client.index(indexRequest, new Header("name", "value"))` becomes `client.index(indexRequest, RequestOptions.DEFAULT.toBuilder().addHeader("name", "value").build());` +[float] ==== Cluster Health API defaults to `cluster` level The Cluster Health API used to default to `shards` level to ease migration diff --git a/docs/reference/migration/migrate_7_0/scripting.asciidoc b/docs/reference/migration/migrate_7_0/scripting.asciidoc index 79380f84204ed..de312c1c7231c 100644 --- a/docs/reference/migration/migrate_7_0/scripting.asciidoc +++ b/docs/reference/migration/migrate_7_0/scripting.asciidoc @@ -1,6 +1,8 @@ +[float] [[breaking_70_scripting_changes]] === Scripting changes +[float] ==== getDate() and getDates() removed Fields of type `long` and `date` had `getDate()` and `getDates()` methods @@ -12,6 +14,7 @@ now been removed. Instead, use `.value` on `date` fields, or explicitly parse `long` fields into a date object using `Instant.ofEpochMilli(doc["myfield"].value)`.
+[float] ==== Script errors will return as `400` error codes Malformed scripts, either in search templates, ingest pipelines or search diff --git a/docs/reference/migration/migrate_7_0/search.asciidoc b/docs/reference/migration/migrate_7_0/search.asciidoc index a7d32896e9723..b7aa15861afad 100644 --- a/docs/reference/migration/migrate_7_0/search.asciidoc +++ b/docs/reference/migration/migrate_7_0/search.asciidoc @@ -1,6 +1,8 @@ +[float] [[breaking_70_search_changes]] === Search and Query DSL changes +[float] ==== Changes to queries * The default value for `transpositions` parameter of `fuzzy` query has been changed to `true`. @@ -18,6 +20,7 @@ * Attempts to generate multi-term phrase queries against non-text fields with a custom analyzer will now throw an exception +[float] ==== Adaptive replica selection enabled by default Adaptive replica selection has been enabled by default. If you wish to return to @@ -35,6 +38,7 @@ PUT /_cluster/settings -------------------------------------------------- // CONSOLE +[float] ==== Search API returns `400` for invalid requests The Search API returns `400 - Bad request` while it would previously return @@ -48,12 +52,14 @@ The Search API returns `400 - Bad request` while it would previously return * number of filters in the adjacency matrix aggregation is too large * script compilation errors +[float] ==== Scroll queries cannot use the `request_cache` anymore Setting `request_cache:true` on a query that creates a scroll (`scroll=1m`) has been deprecated in 6 and will now return a `400 - Bad request`. Scroll queries are not meant to be cached. +[float] ==== Scroll queries cannot use `rescore` anymore Including a rescore clause on a query that creates a scroll (`scroll=1m`) has been deprecated in 6.5 and will now return a `400 - Bad request`. Allowing @@ -61,6 +67,7 @@ rescore on scroll queries would break the scroll sort. 
In the 6.x line, the rescore clause was silently ignored (for scroll queries), and it was allowed in the 5.x line. +[float] ==== Term Suggesters supported distance algorithms The following string distance algorithms were given additional names in 6.2 and @@ -70,7 +77,7 @@ removed. * `levenstein` - replaced by `levenshtein` * `jarowinkler` - replaced by `jaro_winkler` - +[float] ==== Limiting the number of terms that can be used in a Terms Query request Executing a Terms Query with a lot of terms may degrade the cluster performance, @@ -79,7 +86,7 @@ To safeguard against this, the maximum number of terms that can be used in a Terms Query request has been limited to 65536. This default maximum can be changed for a particular index with the index setting `index.max_terms_count`. - +[float] ==== Limiting the length of regex that can be used in a Regexp Query request Executing a Regexp Query with a long regex string may degrade search performance. @@ -87,11 +94,13 @@ To safeguard against this, the maximum length of regex that can be used in a Regexp Query request has been limited to 1000. This default maximum can be changed for a particular index with the index setting `index.max_regex_length`. +[float] ==== Invalid `_search` request body Search requests with extra content after the main object will no longer be accepted by the `_search` endpoint. A parsing exception will be thrown instead. +[float] ==== Context Completion Suggester The ability to query and index context enabled suggestions without context, @@ -102,12 +111,14 @@ considerably. For geo context the value of the `path` parameter is now validated against the mapping, and the context is only accepted if `path` points to a field with `geo_point` type. +[float] ==== Semantics changed for `max_concurrent_shard_requests` `max_concurrent_shard_requests` used to limit the total number of concurrent shard requests a single high level search request can execute. 
In 7.0 this changed to be the max number of concurrent shard requests per node. The default is now `5`. +[float] ==== `max_score` set to `null` when scores are not tracked `max_score` used to be set to `0` whenever scores are not tracked. `null` is now used diff --git a/docs/reference/migration/migrate_7_0/settings.asciidoc b/docs/reference/migration/migrate_7_0/settings.asciidoc index e4b132b38d60d..85648da4f0d25 100644 --- a/docs/reference/migration/migrate_7_0/settings.asciidoc +++ b/docs/reference/migration/migrate_7_0/settings.asciidoc @@ -1,18 +1,21 @@ +[float] [[breaking_70_settings_changes]] - === Settings changes +[float] ==== The default for `node.name` is now the hostname `node.name` now defaults to the hostname at the time when Elasticsearch is started. Previously the default node name was the first eight characters of the node id. It can still be configured explicitly in `elasticsearch.yml`. +[float] ==== Percolator * The deprecated `index.percolator.map_unmapped_fields_as_string` setting has been removed in favour of the `index.percolator.map_unmapped_fields_as_text` setting. +[float] ==== Index thread pool * Internally, single-document index/delete/update requests are executed as bulk @@ -21,6 +24,7 @@ of the node id. It can still be configured explicitly in `elasticsearch.yml`. longer needed and has been removed. As such, the settings `thread_pool.index.size` and `thread_pool.index.queue_size` have been removed. +[float] [[write-thread-pool-fallback]] ==== Write thread pool fallback @@ -32,6 +36,7 @@ of the node id. It can still be configured explicitly in `elasticsearch.yml`. available to keep the display output in APIs as `bulk` instead of `write`. These fallback settings and this system property have been removed. +[float] [[remove-http-enabled]] ==== Http enabled setting removed @@ -39,6 +44,7 @@ of the node id. It can still be configured explicitly in `elasticsearch.yml`. use of the transport client. 
This setting has been removed, as the transport client will be removed in the future, thus requiring HTTP to always be enabled. +[float] [[remove-http-pipelining-setting]] ==== Http pipelining setting removed @@ -47,6 +53,7 @@ This setting has been removed, as disabling http pipelining support on the serve provided little value. The setting `http.pipelining.max_events` can still be used to limit the number of pipelined requests in-flight. +[float] ==== Cross-cluster search settings renamed The cross-cluster search remote cluster connection infrastructure is also used diff --git a/docs/reference/migration/migrate_7_0/snapshotstats.asciidoc b/docs/reference/migration/migrate_7_0/snapshotstats.asciidoc index 6dbd24b13a1eb..2098eb3574ca8 100644 --- a/docs/reference/migration/migrate_7_0/snapshotstats.asciidoc +++ b/docs/reference/migration/migrate_7_0/snapshotstats.asciidoc @@ -1,3 +1,4 @@ +[float] [[breaking_70_snapshotstats_changes]] === Snapshot stats changes @@ -7,6 +8,7 @@ Snapshot stats details are provided in a new structured way: * `incremental` section for those files that actually needed to be copied over as part of the incremental snapshotting. * In case of a snapshot that's still in progress, there's also a `processed` section for files that are in the process of being copied. +[float] ==== Deprecated `number_of_files`, `processed_files`, `total_size_in_bytes` and `processed_size_in_bytes` snapshot stats properties have been removed * Properties `number_of_files` and `total_size_in_bytes` are removed and should be replaced by values of nested object `total`.