diff --git a/README.md b/README.md index b48bd71..2f13f1f 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ This is a multi bucket aggregation. Installation ------------ -`bin/plugin --install path_hierarchy --url "https://github.com/opendatasoft/elasticsearch-aggregation-pathhierarchy/releases/download/v7.17.6.1/pathhierarchy-aggregation-7.17.6.1.zip"` +`bin/plugin --install path_hierarchy --url "https://github.com/opendatasoft/elasticsearch-aggregation-pathhierarchy/releases/download/v8.15.2.1/pathhierarchy-aggregation-8.15.2.1.zip"` Build ----- @@ -309,7 +309,7 @@ Built with Java 17. The first 3 digits of plugin version is Elasticsearch versioning. The last digit is used for plugin versioning under an elasticsearch version. To install it, launch this command in Elasticsearch directory replacing the url with a release suiting your case (please check available releases [here](https://github.com/opendatasoft/elasticsearch-aggregation-pathhierarchy/releases)): -`./bin/elasticsearch-plugin install https://github.com/opendatasoft/elasticsearch-aggregation-pathhierarchy/releases/download/v7.17.6.1/pathhierarchy-aggregation-7.17.6.1.zip` +`./bin/elasticsearch-plugin install https://github.com/opendatasoft/elasticsearch-aggregation-pathhierarchy/releases/download/v8.15.2.1/pathhierarchy-aggregation-8.15.2.1.zip` License diff --git a/build.gradle b/build.gradle index 6cb4d24..0654a0f 100644 --- a/build.gradle +++ b/build.gradle @@ -32,6 +32,6 @@ esplugin { dependencies { implementation "org.elasticsearch:elasticsearch:${es_version}" - yamlRestTestImplementation "org.elasticsearch.test:framework:${es_version}" - yamlRestTestImplementation "org.apache.logging.log4j:log4j-core:2.17.1" + testImplementation "org.apache.logging.log4j:log4j-core:2.17.1" + testImplementation "org.elasticsearch.test:framework:${es_version}" } \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 8dc6a09..f2b490e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,4 +1,4 @@ -version: "3.7" +version: "3.8" networks: es-network: @@ -11,6 +11,7 @@ services: target: elasticsearch-plugin-debug environment: - discovery.type=single-node + - xpack.security.enabled=false # NO DEBUG # - ES_JAVA_OPTS=-Xms512m -Xmx512m # DEBUG @@ -23,7 +24,7 @@ services: - es-network kibana: - image: docker.elastic.co/kibana/kibana:7.17.5 + image: docker.elastic.co/kibana/kibana:8.15.2 environment: ELASTICSEARCH_HOSTS: http://elasticsearch-plugin-debug:9200/ ports: diff --git a/docker/Dockerfile b/docker/Dockerfile index 4243213..65cfc0a 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -1,4 +1,4 @@ -FROM docker.elastic.co/elasticsearch/elasticsearch:7.17.6 AS elasticsearch-plugin-debug +FROM docker.elastic.co/elasticsearch/elasticsearch:8.15.2 AS elasticsearch-plugin-debug -COPY /build/distributions/pathhierarchy-aggregation-7.17.6.1.zip /tmp/pathhierarchy-aggregation-7.17.6.1.zip -RUN ./bin/elasticsearch-plugin install file:/tmp/pathhierarchy-aggregation-7.17.6.1.zip +COPY /build/distributions/pathhierarchy-aggregation-8.15.2.1.zip /tmp/pathhierarchy-aggregation-8.15.2.1.zip +RUN ./bin/elasticsearch-plugin install file:/tmp/pathhierarchy-aggregation-8.15.2.1.zip diff --git a/gradle.properties b/gradle.properties index f24b89b..ef68291 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,2 +1,2 @@ -es_version = 7.17.6 -plugin_version = 7.17.6.1 +es_version = 8.15.2 +plugin_version = 8.15.2.1 diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index 
e708b1c..249e583 100644 Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index ae04661..48c0a02 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,5 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-7.5.1-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-8.7-bin.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/gradlew b/gradlew index 4f906e0..a69d9cb 100755 --- a/gradlew +++ b/gradlew @@ -1,7 +1,7 @@ -#!/usr/bin/env sh +#!/bin/sh # -# Copyright 2015 the original author or authors. +# Copyright © 2015-2021 the original authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -17,67 +17,101 @@ # ############################################################################## -## -## Gradle start up script for UN*X -## +# +# Gradle start up script for POSIX generated by Gradle. +# +# Important for running: +# +# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is +# noncompliant, but you have some other compliant shell such as ksh or +# bash, then to run this script, type that shell name before the whole +# command line, like: +# +# ksh Gradle +# +# Busybox and similar reduced shells will NOT work, because this script +# requires all of these POSIX shell features: +# * functions; +# * expansions «$var», «${var}», «${var:-default}», «${var+SET}», +# «${var#prefix}», «${var%suffix}», and «$( cmd )»; +# * compound commands having a testable exit status, especially «case»; +# * various built-in commands including «command», «set», and «ulimit». +# +# Important for patching: +# +# (2) This script targets any POSIX shell, so it avoids extensions provided +# by Bash, Ksh, etc; in particular arrays are avoided. +# +# The "traditional" practice of packing multiple parameters into a +# space-separated string is a well documented source of bugs and security +# problems, so this is (mostly) avoided, by progressively accumulating +# options in "$@", and eventually passing that to Java. +# +# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, +# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; +# see the in-line comments for details. +# +# There are tweaks for specific operating systems such as AIX, CygWin, +# Darwin, MinGW, and NonStop. +# +# (3) This script is generated from the Groovy template +# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# within the Gradle project. +# +# You can find Gradle at https://github.com/gradle/gradle/. +# ############################################################################## # Attempt to set APP_HOME + # Resolve links: $0 may be a link -PRG="$0" -# Need this for relative symlinks. -while [ -h "$PRG" ] ; do - ls=`ls -ld "$PRG"` - link=`expr "$ls" : '.*-> \(.*\)$'` - if expr "$link" : '/.*' > /dev/null; then - PRG="$link" - else - PRG=`dirname "$PRG"`"/$link" - fi +app_path=$0 + +# Need this for daisy-chained symlinks. 
+while + APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path + [ -h "$app_path" ] +do + ls=$( ls -ld "$app_path" ) + link=${ls#*' -> '} + case $link in #( + /*) app_path=$link ;; #( + *) app_path=$APP_HOME$link ;; + esac done -SAVED="`pwd`" -cd "`dirname \"$PRG\"`/" >/dev/null -APP_HOME="`pwd -P`" -cd "$SAVED" >/dev/null + +APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit APP_NAME="Gradle" -APP_BASE_NAME=`basename "$0"` +APP_BASE_NAME=${0##*/} # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' # Use the maximum available, or set MAX_FD != -1 to use that value. -MAX_FD="maximum" +MAX_FD=maximum warn () { echo "$*" -} +} >&2 die () { echo echo "$*" echo exit 1 -} +} >&2 # OS specific support (must be 'true' or 'false'). cygwin=false msys=false darwin=false nonstop=false -case "`uname`" in - CYGWIN* ) - cygwin=true - ;; - Darwin* ) - darwin=true - ;; - MINGW* ) - msys=true - ;; - NONSTOP* ) - nonstop=true - ;; +case "$( uname )" in #( + CYGWIN* ) cygwin=true ;; #( + Darwin* ) darwin=true ;; #( + MSYS* | MINGW* ) msys=true ;; #( + NONSTOP* ) nonstop=true ;; esac CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar @@ -87,9 +121,9 @@ CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar if [ -n "$JAVA_HOME" ] ; then if [ -x "$JAVA_HOME/jre/sh/java" ] ; then # IBM's JDK on AIX uses strange locations for the executables - JAVACMD="$JAVA_HOME/jre/sh/java" + JAVACMD=$JAVA_HOME/jre/sh/java else - JAVACMD="$JAVA_HOME/bin/java" + JAVACMD=$JAVA_HOME/bin/java fi if [ ! -x "$JAVACMD" ] ; then die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME @@ -98,7 +132,7 @@ Please set the JAVA_HOME variable in your environment to match the location of your Java installation." fi else - JAVACMD="java" + JAVACMD=java which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. Please set the JAVA_HOME variable in your environment to match the @@ -106,80 +140,101 @@ location of your Java installation." fi # Increase the maximum file descriptors if we can. -if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then - MAX_FD_LIMIT=`ulimit -H -n` - if [ $? -eq 0 ] ; then - if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then - MAX_FD="$MAX_FD_LIMIT" - fi - ulimit -n $MAX_FD - if [ $? -ne 0 ] ; then - warn "Could not set maximum file descriptor limit: $MAX_FD" - fi - else - warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" - fi +if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then + case $MAX_FD in #( + max*) + MAX_FD=$( ulimit -H -n ) || + warn "Could not query maximum file descriptor limit" + esac + case $MAX_FD in #( + '' | soft) :;; #( + *) + ulimit -n "$MAX_FD" || + warn "Could not set maximum file descriptor limit to $MAX_FD" + esac fi -# For Darwin, add options to specify how the application appears in the dock -if $darwin; then - GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" -fi +# Collect all arguments for the java command, stacking in reverse order: +# * args from the command line +# * the main class name +# * -classpath +# * -D...appname settings +# * --module-path (only if needed) +# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. 
# For Cygwin or MSYS, switch paths to Windows format before running java -if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then - APP_HOME=`cygpath --path --mixed "$APP_HOME"` - CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` - - JAVACMD=`cygpath --unix "$JAVACMD"` - - # We build the pattern for arguments to be converted via cygpath - ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` - SEP="" - for dir in $ROOTDIRSRAW ; do - ROOTDIRS="$ROOTDIRS$SEP$dir" - SEP="|" - done - OURCYGPATTERN="(^($ROOTDIRS))" - # Add a user-defined pattern to the cygpath arguments - if [ "$GRADLE_CYGPATTERN" != "" ] ; then - OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" - fi +if "$cygwin" || "$msys" ; then + APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) + CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) + + JAVACMD=$( cygpath --unix "$JAVACMD" ) + # Now convert the arguments - kludge to limit ourselves to /bin/sh - i=0 - for arg in "$@" ; do - CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` - CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option - - if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition - eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` - else - eval `echo args$i`="\"$arg\"" + for arg do + if + case $arg in #( + -*) false ;; # don't mess with options #( + /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath + [ -e "$t" ] ;; #( + *) false ;; + esac + then + arg=$( cygpath --path --ignore --mixed "$arg" ) fi - i=`expr $i + 1` + # Roll the args list around exactly as many times as the number of + # args, so each arg winds up back in the position where it started, but + # possibly modified. + # + # NB: a `for` loop captures its iteration list before it begins, so + # changing the positional parameters here affects neither the number of + # iterations, nor the values presented in `arg`. + shift # remove old arg + set -- "$@" "$arg" # push replacement arg done - case $i in - 0) set -- ;; - 1) set -- "$args0" ;; - 2) set -- "$args0" "$args1" ;; - 3) set -- "$args0" "$args1" "$args2" ;; - 4) set -- "$args0" "$args1" "$args2" "$args3" ;; - 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; - 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; - 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; - 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; - 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; - esac fi -# Escape application args -save () { - for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done - echo " " -} -APP_ARGS=`save "$@"` +# Collect all arguments for the java command; +# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of +# shell script including quotes and variable substitutions, so put them in +# double quotes to make sure that they get re-expanded; and +# * put everything else in single quotes, so that it's not re-expanded. + +set -- \ + "-Dorg.gradle.appname=$APP_BASE_NAME" \ + -classpath "$CLASSPATH" \ + org.gradle.wrapper.GradleWrapperMain \ + "$@" + +# Stop when "xargs" is not available. +if ! command -v xargs >/dev/null 2>&1 +then + die "xargs is not available" +fi + +# Use "xargs" to parse quoted args. +# +# With -n1 it outputs one arg per line, with the quotes and backslashes removed. 
+# +# In Bash we could simply go: +# +# readarray ARGS < <( xargs -n1 <<<"$var" ) && +# set -- "${ARGS[@]}" "$@" +# +# but POSIX shell has neither arrays nor command substitution, so instead we +# post-process each arg (as a line of input to sed) to backslash-escape any +# character that might be a shell metacharacter, then use eval to reverse +# that process (while maintaining the separation between arguments), and wrap +# the whole thing up as a single "set" statement. +# +# This will of course break if any of these variables contains a newline or +# an unmatched quote. +# -# Collect all arguments for the java command, following the shell quoting and substitution rules -eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" +eval "set -- $( + printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | + xargs -n1 | + sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | + tr '\n' ' ' + )" '"$@"' exec "$JAVACMD" "$@" diff --git a/gradlew.bat b/gradlew.bat index ac1b06f..f127cfd 100644 --- a/gradlew.bat +++ b/gradlew.bat @@ -1,89 +1,91 @@ -@rem -@rem Copyright 2015 the original author or authors. -@rem -@rem Licensed under the Apache License, Version 2.0 (the "License"); -@rem you may not use this file except in compliance with the License. -@rem You may obtain a copy of the License at -@rem -@rem https://www.apache.org/licenses/LICENSE-2.0 -@rem -@rem Unless required by applicable law or agreed to in writing, software -@rem distributed under the License is distributed on an "AS IS" BASIS, -@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -@rem See the License for the specific language governing permissions and -@rem limitations under the License. -@rem - -@if "%DEBUG%" == "" @echo off -@rem ########################################################################## -@rem -@rem Gradle startup script for Windows -@rem -@rem ########################################################################## - -@rem Set local scope for the variables with windows NT shell -if "%OS%"=="Windows_NT" setlocal - -set DIRNAME=%~dp0 -if "%DIRNAME%" == "" set DIRNAME=. -set APP_BASE_NAME=%~n0 -set APP_HOME=%DIRNAME% - -@rem Resolve any "." and ".." in APP_HOME to make it shorter. -for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi - -@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" - -@rem Find java.exe -if defined JAVA_HOME goto findJavaFromJavaHome - -set JAVA_EXE=java.exe -%JAVA_EXE% -version >NUL 2>&1 -if "%ERRORLEVEL%" == "0" goto execute - -echo. -echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. - -goto fail - -:findJavaFromJavaHome -set JAVA_HOME=%JAVA_HOME:"=% -set JAVA_EXE=%JAVA_HOME%/bin/java.exe - -if exist "%JAVA_EXE%" goto execute - -echo. -echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. 
- -goto fail - -:execute -@rem Setup the command line - -set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar - - -@rem Execute Gradle -"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* - -:end -@rem End local scope for the variables with windows NT shell -if "%ERRORLEVEL%"=="0" goto mainEnd - -:fail -rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of -rem the _cmd.exe /c_ return code! -if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 -exit /b 1 - -:mainEnd -if "%OS%"=="Windows_NT" endlocal - -:omega +@rem +@rem Copyright 2015 the original author or authors. +@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. +@rem You may obtain a copy of the License at +@rem +@rem https://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +@rem See the License for the specific language governing permissions and +@rem limitations under the License. +@rem + +@if "%DEBUG%"=="" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%"=="" set DIRNAME=. +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Resolve any "." and ".." in APP_HOME to make it shorter. +for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if %ERRORLEVEL% equ 0 goto execute + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto execute + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* + +:end +@rem End local scope for the variables with windows NT shell +if %ERRORLEVEL% equ 0 goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! 
+set EXIT_CODE=%ERRORLEVEL% +if %EXIT_CODE% equ 0 set EXIT_CODE=1 +if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% +exit /b %EXIT_CODE% + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/src/main/java/org/opendatasoft/elasticsearch/search/aggregations/bucket/DateHierarchyAggregationBuilder.java b/src/main/java/org/opendatasoft/elasticsearch/search/aggregations/bucket/DateHierarchyAggregationBuilder.java index 4642461..3c732e6 100644 --- a/src/main/java/org/opendatasoft/elasticsearch/search/aggregations/bucket/DateHierarchyAggregationBuilder.java +++ b/src/main/java/org/opendatasoft/elasticsearch/search/aggregations/bucket/DateHierarchyAggregationBuilder.java @@ -1,6 +1,7 @@ package org.opendatasoft.elasticsearch.search.aggregations.bucket; -import org.elasticsearch.Version; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.common.Rounding; import org.elasticsearch.common.io.stream.StreamInput; @@ -41,6 +42,10 @@ * The builder of the aggregatorFactory. Also implements the parsing of the request. */ public class DateHierarchyAggregationBuilder extends ValuesSourceAggregationBuilder { + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.V_8_0_0; + } public static final String NAME = "date_hierarchy"; public static final ValuesSourceRegistry.RegistryKey REGISTRY_KEY = new ValuesSourceRegistry.RegistryKey<>(NAME, DateHierarchyAggregationSupplier.class); @@ -135,8 +140,8 @@ public void writeTo(StreamOutput out) throws IOException { public static final ObjectParser PARSER = ObjectParser.fromBuilder(NAME, DateHierarchyAggregationBuilder::new); static { - - ValuesSourceAggregationBuilder.declareFields(PARSER, true, true, true); + // ES 8.x introduces field validation. 
Setting timezoneAware to false to avoid duplication of the timezone field + ValuesSourceAggregationBuilder.declareFields(PARSER, true, true, false); PARSER.declareString(DateHierarchyAggregationBuilder::interval, INTERVAL_FIELD); @@ -176,7 +181,7 @@ private DateHierarchyAggregationBuilder(String name) { } @Override - protected boolean serializeTargetValueType(Version version) { + protected boolean serializeTargetValueType(TransportVersion version) { return true; } @@ -408,8 +413,5 @@ public boolean equals(Object obj) { public String getType() { return NAME; } - - @Override - protected ValuesSourceRegistry.RegistryKey getRegistryKey() { return REGISTRY_KEY; } } diff --git a/src/main/java/org/opendatasoft/elasticsearch/search/aggregations/bucket/DateHierarchyAggregator.java b/src/main/java/org/opendatasoft/elasticsearch/search/aggregations/bucket/DateHierarchyAggregator.java index 0dc4db1..f05a59f 100644 --- a/src/main/java/org/opendatasoft/elasticsearch/search/aggregations/bucket/DateHierarchyAggregator.java +++ b/src/main/java/org/opendatasoft/elasticsearch/search/aggregations/bucket/DateHierarchyAggregator.java @@ -1,10 +1,8 @@ package org.opendatasoft.elasticsearch.search.aggregations.bucket; -import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.Rounding; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -12,6 +10,7 @@ import org.elasticsearch.common.util.BytesRefHash; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.BucketOrder; @@ -154,11 +153,11 @@ public boolean equals(Object obj) { * The LeafBucketCollector is a "Per-leaf bucket collector". It collects docs for the account of buckets. 
*/ @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucketCollector sub) throws IOException { + public LeafBucketCollector getLeafCollector(AggregationExecutionContext ctx, LeafBucketCollector sub) throws IOException { if (valuesSource == null) { return LeafBucketCollector.NO_OP_COLLECTOR; } - final SortedNumericDocValues values = valuesSource.longValues(ctx); + final SortedNumericDocValues values = valuesSource.longValues(ctx.getLeafReaderContext()); return new LeafBucketCollectorBase(sub, values) { diff --git a/src/main/java/org/opendatasoft/elasticsearch/search/aggregations/bucket/InternalDateHierarchy.java b/src/main/java/org/opendatasoft/elasticsearch/search/aggregations/bucket/InternalDateHierarchy.java index c46e978..fbd2a58 100644 --- a/src/main/java/org/opendatasoft/elasticsearch/search/aggregations/bucket/InternalDateHierarchy.java +++ b/src/main/java/org/opendatasoft/elasticsearch/search/aggregations/bucket/InternalDateHierarchy.java @@ -4,7 +4,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.AggregationReduceContext; +import org.elasticsearch.search.aggregations.AggregatorReducer; import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; @@ -15,11 +16,13 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.TreeMap; /** * An internal implementation of {@link InternalMultiBucketAggregation} @@ -27,8 +30,53 @@ * Mainly, returns the builder and makes the reduce of buckets. */ public class InternalDateHierarchy extends InternalMultiBucketAggregation { + InternalDateHierarchy.InternalBucket> { + @Override + protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { + Map> buckets = new LinkedHashMap<>(); + + return new AggregatorReducer() { + private long otherHierarchyNodes = 0; + + @Override + public void accept(InternalAggregation aggregation) { + InternalDateHierarchy dateHierarchy = (InternalDateHierarchy) aggregation; + + otherHierarchyNodes += dateHierarchy.getSumOtherHierarchyNodes(); + + for (InternalBucket bucket : dateHierarchy.buckets) { + List existingBuckets = buckets.get(bucket.key); + if (existingBuckets == null) { + existingBuckets = new ArrayList<>(size); + buckets.put(bucket.key, existingBuckets); + } + existingBuckets.add(bucket); + } + } + + @Override + public InternalAggregation get() { + final int size = !reduceContext.isFinalReduce() ? 
buckets.size() : Math.min(requiredSize, buckets.size()); + PathSortedTree ordered = new PathSortedTree<>(order.comparator(), size); + + for (List sameTermBuckets : buckets.values()) { + final InternalBucket b = reduceBucket(sameTermBuckets, reduceContext); + if (b.getDocCount() >= minDocCount || !reduceContext.isFinalReduce()) { + reduceContext.consumeBucketsAndMaybeBreak(1); + ordered.add(b.paths, b); + } else { + reduceContext.consumeBucketsAndMaybeBreak(-countInnerBucket(b)); + } + } + + long sum_other_hierarchy_nodes = ordered.getFullSize() - size + otherHierarchyNodes; + + return new InternalDateHierarchy(getName(), ordered.getAsList(), order, minDocCount, requiredSize, shardSize, + sum_other_hierarchy_nodes, getMetadata()); + } + }; + } /** * The bucket class of InternalDateHierarchy. * @see MultiBucketsAggregation.Bucket @@ -106,7 +154,7 @@ public long getDocCount() { } @Override - public Aggregations getAggregations() { + public InternalAggregations getAggregations() { return aggregations; } @@ -210,54 +258,7 @@ public List getBuckets() { return buckets; } - /** - * Reduces the given aggregations to a single one and returns it. - */ - @Override - public InternalDateHierarchy reduce(List aggregations, ReduceContext reduceContext) { - Map> buckets = null; - long otherHierarchyNodes = 0; - - // extract buckets from aggregations - for (InternalAggregation aggregation : aggregations) { - InternalDateHierarchy dateHierarchy = (InternalDateHierarchy) aggregation; - if (buckets == null) { - buckets = new LinkedHashMap<>(); - } - - otherHierarchyNodes += dateHierarchy.getSumOtherHierarchyNodes(); - - for (InternalBucket bucket : dateHierarchy.buckets) { - List existingBuckets = buckets.get(bucket.key); - if (existingBuckets == null) { - existingBuckets = new ArrayList<>(aggregations.size()); - buckets.put(bucket.key, existingBuckets); - } - existingBuckets.add(bucket); - } - } - - // reduce and sort buckets depending of ordering rules - final int size = !reduceContext.isFinalReduce() ? 
buckets.size() : Math.min(requiredSize, buckets.size()); - PathSortedTree ordered = new PathSortedTree<>(order.comparator(), size); - for (List sameTermBuckets : buckets.values()) { - - final InternalBucket b = reduceBucket(sameTermBuckets, reduceContext); - if (b.getDocCount() >= minDocCount || !reduceContext.isFinalReduce()) { - reduceContext.consumeBucketsAndMaybeBreak(1); - ordered.add(b.paths, b); - } else { - reduceContext.consumeBucketsAndMaybeBreak(-countInnerBucket(b)); - } - } - - long sum_other_hierarchy_nodes = ordered.getFullSize() - size + otherHierarchyNodes; - return new InternalDateHierarchy(getName(), ordered.getAsList(), order, minDocCount, requiredSize, shardSize, - sum_other_hierarchy_nodes, getMetadata()); - } - - @Override - protected InternalBucket reduceBucket(List buckets, ReduceContext context) { + protected InternalBucket reduceBucket(List buckets, AggregationReduceContext context) { List aggregationsList = new ArrayList<>(buckets.size()); InternalBucket reduced = null; for (InternalBucket bucket : buckets) { diff --git a/src/main/java/org/opendatasoft/elasticsearch/search/aggregations/bucket/InternalPathHierarchy.java b/src/main/java/org/opendatasoft/elasticsearch/search/aggregations/bucket/InternalPathHierarchy.java index 3c3651b..88e4868 100644 --- a/src/main/java/org/opendatasoft/elasticsearch/search/aggregations/bucket/InternalPathHierarchy.java +++ b/src/main/java/org/opendatasoft/elasticsearch/search/aggregations/bucket/InternalPathHierarchy.java @@ -6,7 +6,8 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.AggregationReduceContext; +import org.elasticsearch.search.aggregations.AggregatorReducer; import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; @@ -33,6 +34,60 @@ public class InternalPathHierarchy extends InternalMultiBucketAggregation> buckets = new TreeMap<>(); + + return new AggregatorReducer() { + // Need a global otherHierarchyNodes counter that is increased in accept() and used in get() + private long otherHierarchyNodes = 0; + + @Override + public void accept(InternalAggregation aggregation) { + InternalPathHierarchy pathHierarchy = (InternalPathHierarchy) aggregation; + otherHierarchyNodes += pathHierarchy.getSumOtherHierarchyNodes(); + + for (InternalBucket bucket : pathHierarchy.buckets) { + List existingBuckets = buckets.get(bucket.termBytes); + if (existingBuckets == null) { + existingBuckets = new ArrayList<>(size); + buckets.put(bucket.termBytes, existingBuckets); + } + existingBuckets.add(bucket); + } + } + + @Override + public InternalAggregation get() { + // reduce and sort buckets depending of ordering rules + final int size = !reduceContext.isFinalReduce() ? 
buckets.size() : Math.min(requiredSize, buckets.size()); + PathSortedTree ordered = new PathSortedTree<>(order.comparator(), size); + + for (List sameTermBuckets : buckets.values()) { + final InternalBucket b = reduceBucket(sameTermBuckets, reduceContext); + if (b.getDocCount() >= minDocCount || !reduceContext.isFinalReduce()) { + reduceContext.consumeBucketsAndMaybeBreak(1); + String [] pathsForTree; + if (b.minDepth > 0) { + pathsForTree = Arrays.copyOfRange(b.paths, b.minDepth, b.paths.length); + } else { + pathsForTree = b.paths; + } + ordered.add(pathsForTree, b); + } else { + reduceContext.consumeBucketsAndMaybeBreak(-countInnerBucket(b)); + } + } + + long sum_other_hierarchy_nodes = ordered.getFullSize() - size + otherHierarchyNodes; + + return new InternalPathHierarchy(getName(), ordered.getAsList(), order, minDocCount, requiredSize, shardSize, + sum_other_hierarchy_nodes, separator, getMetadata()); + + } + }; + } + /** * The bucket class of InternalPathHierarchy. * @see MultiBucketsAggregation.Bucket @@ -115,7 +170,7 @@ public long getDocCount() { } @Override - public Aggregations getAggregations() { + public InternalAggregations getAggregations() { return aggregations; } @@ -129,7 +184,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } } - private List buckets; private BytesRef separator; private BucketOrder order; @@ -224,62 +278,10 @@ public List getBuckets() { return buckets; } - /** - * Reduces the given aggregations to a single one and returns it. - */ - @Override - public InternalPathHierarchy reduce(List aggregations, ReduceContext reduceContext) { - Map> buckets = null; - long otherHierarchyNodes = 0; - - // extract buckets from aggregations - for (InternalAggregation aggregation : aggregations) { - InternalPathHierarchy pathHierarchy = (InternalPathHierarchy) aggregation; - if (buckets == null) { - buckets = new TreeMap<>(); - } - - otherHierarchyNodes += pathHierarchy.getSumOtherHierarchyNodes(); - - for (InternalBucket bucket : pathHierarchy.buckets) { - List existingBuckets = buckets.get(bucket.termBytes); - if (existingBuckets == null) { - existingBuckets = new ArrayList<>(aggregations.size()); - buckets.put(bucket.termBytes, existingBuckets); - } - existingBuckets.add(bucket); - } - } - - // reduce and sort buckets depending of ordering rules - final int size = !reduceContext.isFinalReduce() ? 
buckets.size() : Math.min(requiredSize, buckets.size()); - PathSortedTree ordered = new PathSortedTree<>(order.comparator(), size); - for (List sameTermBuckets : buckets.values()) { - final InternalBucket b = reduceBucket(sameTermBuckets, reduceContext); - if (b.getDocCount() >= minDocCount || !reduceContext.isFinalReduce()) { - reduceContext.consumeBucketsAndMaybeBreak(1); - String [] pathsForTree; - if (b.minDepth > 0) { - pathsForTree = Arrays.copyOfRange(b.paths, b.minDepth, b.paths.length); - } else { - pathsForTree = b.paths; - } - ordered.add(pathsForTree, b); - } else { - reduceContext.consumeBucketsAndMaybeBreak(-countInnerBucket(b)); - } - } - - long sum_other_hierarchy_nodes = ordered.getFullSize() - size + otherHierarchyNodes; - return new InternalPathHierarchy(getName(), ordered.getAsList(), order, minDocCount, requiredSize, shardSize, - sum_other_hierarchy_nodes, separator, getMetadata()); - } - /** * Utility method of InternalPathHierarchy.doReduce() */ - @Override - protected InternalBucket reduceBucket(List buckets, ReduceContext context) { + protected InternalBucket reduceBucket(List buckets, AggregationReduceContext context) { List aggregationsList = new ArrayList<>(buckets.size()); InternalBucket reduced = null; for (InternalBucket bucket : buckets) { diff --git a/src/main/java/org/opendatasoft/elasticsearch/search/aggregations/bucket/PathHierarchyAggregationBuilder.java b/src/main/java/org/opendatasoft/elasticsearch/search/aggregations/bucket/PathHierarchyAggregationBuilder.java index e134aa6..e5eb2b8 100644 --- a/src/main/java/org/opendatasoft/elasticsearch/search/aggregations/bucket/PathHierarchyAggregationBuilder.java +++ b/src/main/java/org/opendatasoft/elasticsearch/search/aggregations/bucket/PathHierarchyAggregationBuilder.java @@ -1,6 +1,7 @@ package org.opendatasoft.elasticsearch.search.aggregations.bucket; -import org.elasticsearch.Version; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -31,6 +32,10 @@ * The builder of the aggregatorFactory. Also implements the parsing of the request. 
*/ public class PathHierarchyAggregationBuilder extends ValuesSourceAggregationBuilder { + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.V_8_0_0; + } public static final String NAME = "path_hierarchy"; public static final ValuesSourceRegistry.RegistryKey REGISTRY_KEY = new ValuesSourceRegistry.RegistryKey<>(NAME, PathHierarchyAggregationSupplier.class); @@ -92,7 +97,7 @@ private PathHierarchyAggregationBuilder(String name) { } @Override - protected boolean serializeTargetValueType(Version version) { + protected boolean serializeTargetValueType(TransportVersion version) { return true; } @@ -344,8 +349,5 @@ public boolean equals(Object obj) { public String getType() { return NAME; } - - @Override - protected ValuesSourceRegistry.RegistryKey getRegistryKey() { return REGISTRY_KEY; } } diff --git a/src/main/java/org/opendatasoft/elasticsearch/search/aggregations/bucket/PathHierarchyAggregator.java b/src/main/java/org/opendatasoft/elasticsearch/search/aggregations/bucket/PathHierarchyAggregator.java index aa6ef53..4b68e85 100644 --- a/src/main/java/org/opendatasoft/elasticsearch/search/aggregations/bucket/PathHierarchyAggregator.java +++ b/src/main/java/org/opendatasoft/elasticsearch/search/aggregations/bucket/PathHierarchyAggregator.java @@ -12,6 +12,7 @@ import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; +import org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.BucketOrder; @@ -157,11 +158,11 @@ public boolean equals(Object obj) { * The LeafBucketCollector is a "Per-leaf bucket collector". It collects docs for the account of buckets. */ @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucketCollector sub) throws IOException { + public LeafBucketCollector getLeafCollector(AggregationExecutionContext ctx, LeafBucketCollector sub) throws IOException { if (valuesSource == null) { return LeafBucketCollector.NO_OP_COLLECTOR; } - final SortedBinaryDocValues values = valuesSource.bytesValues(ctx); + final SortedBinaryDocValues values = valuesSource.bytesValues(ctx.getLeafReaderContext()); return new LeafBucketCollectorBase(sub, values) { final BytesRefBuilder previous = new BytesRefBuilder(); /** diff --git a/src/main/java/org/opendatasoft/elasticsearch/search/aggregations/bucket/PathHierarchyAggregatorFactory.java b/src/main/java/org/opendatasoft/elasticsearch/search/aggregations/bucket/PathHierarchyAggregatorFactory.java index 0c383f9..13671ec 100644 --- a/src/main/java/org/opendatasoft/elasticsearch/search/aggregations/bucket/PathHierarchyAggregatorFactory.java +++ b/src/main/java/org/opendatasoft/elasticsearch/search/aggregations/bucket/PathHierarchyAggregatorFactory.java @@ -4,7 +4,6 @@ import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.FutureArrays; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.index.fielddata.SortingBinaryDocValues; import org.elasticsearch.search.aggregations.Aggregator; @@ -26,6 +25,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Map; +import java.util.Arrays; /** * The factory of aggregators. 
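Note on the collector hunks above (DateHierarchyAggregator and PathHierarchyAggregator): in ES 8.x, getLeafCollector no longer receives a Lucene LeafReaderContext directly; it receives an AggregationExecutionContext and reads the segment context back from it. The fragment below is a condensed sketch of that pattern, not a compilable class: valuesSource and the per-document logic belong to the enclosing aggregator, exactly as in the hunks above.

    // ES 8.x leaf-collector signature, as used by both aggregators in this diff.
    @Override
    public LeafBucketCollector getLeafCollector(AggregationExecutionContext ctx,
                                                LeafBucketCollector sub) throws IOException {
        if (valuesSource == null) {
            return LeafBucketCollector.NO_OP_COLLECTOR;   // field not mapped: collect nothing
        }
        // 7.x passed the LeafReaderContext itself; 8.x wraps it, so doc values are
        // resolved through ctx.getLeafReaderContext().
        final SortedBinaryDocValues values = valuesSource.bytesValues(ctx.getLeafReaderContext());
        return new LeafBucketCollectorBase(sub, values) {
            @Override
            public void collect(int doc, long owningBucketOrd) throws IOException {
                // per-document bucketing unchanged from the 7.x implementation
            }
        };
    }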
@@ -166,7 +166,7 @@ public boolean advanceExact(int docId) throws IOException { for (int offset=0; offset < val.length; offset++) { // it is a separator if (val.length - offset >= separator.length && - FutureArrays.equals( + Arrays.equals( separator.bytes, separator.offset, separator.offset + separator.length, val.bytes, val.offset + offset, val.offset + offset + separator.length)) { // ignore separator at the beginning
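The hunk above replaces Lucene's removed FutureArrays.equals helper with java.util.Arrays.equals, which has offered the same range-based overload since Java 9 (the plugin now builds with Java 17). A small self-contained illustration of that comparison, using made-up byte buffers instead of the plugin's BytesRef values:

    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;

    // Standalone sketch of the range-based Arrays.equals call used in
    // PathHierarchyAggregatorFactory#advanceExact to locate the separator in a path.
    // The path and separator values here are invented for the example.
    public class SeparatorScan {
        public static void main(String[] args) {
            byte[] separator = "/".getBytes(StandardCharsets.UTF_8);
            byte[] path = "/usr/local/bin".getBytes(StandardCharsets.UTF_8);

            for (int offset = 0; offset < path.length; offset++) {
                // Same shape as the hunk above: compare the whole separator range
                // against a window of the value starting at `offset`.
                if (path.length - offset >= separator.length
                        && Arrays.equals(
                            separator, 0, separator.length,
                            path, offset, offset + separator.length)) {
                    System.out.println("separator at offset " + offset);
                }
            }
        }
    }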
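A reading note on the largest API change in this diff: in InternalDateHierarchy and InternalPathHierarchy, the single reduce(List, ReduceContext) override is removed. The 8.15 API instead asks each InternalAggregation for an AggregatorReducer via getLeaderReducer(...); the framework feeds it one partial result at a time through accept(...) and requests the combined result once via get(). The schematic below is condensed from the InternalPathHierarchy hunk earlier in the diff; the merge and sort logic in get() is elided, so this is a sketch of the contract, not a substitute for that hunk.

    // Schematic of the incremental reduce contract (fragment inside InternalPathHierarchy).
    @Override
    protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) {
        final Map<BytesRef, List<InternalBucket>> bucketsByTerm = new TreeMap<>();

        return new AggregatorReducer() {
            private long otherHierarchyNodes = 0;

            @Override
            public void accept(InternalAggregation aggregation) {
                // Called once per partial aggregation: fold its buckets into the shared map.
                InternalPathHierarchy partial = (InternalPathHierarchy) aggregation;
                otherHierarchyNodes += partial.getSumOtherHierarchyNodes();
                for (InternalBucket bucket : partial.getBuckets()) {
                    bucketsByTerm.computeIfAbsent(bucket.termBytes, k -> new ArrayList<>(size)).add(bucket);
                }
            }

            @Override
            public InternalAggregation get() {
                // Called once, after every partial result has been accepted: merge same-key
                // buckets, apply requiredSize/minDocCount, and build the reduced aggregation.
                // See the full hunk above for the actual merging; this sketch stops here.
                throw new UnsupportedOperationException("schematic only");
            }
        };
    }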