diff --git a/docs/changelog/98038.yaml b/docs/changelog/98038.yaml new file mode 100644 index 0000000000000..d99db24664f30 --- /dev/null +++ b/docs/changelog/98038.yaml @@ -0,0 +1,6 @@ +pr: 98038 +summary: Update enrich execution to only set index false on fields that support it +area: Ingest Node +type: bug +issues: + - 98019 diff --git a/docs/changelog/98847.yaml b/docs/changelog/98847.yaml new file mode 100644 index 0000000000000..ab7455bd783c3 --- /dev/null +++ b/docs/changelog/98847.yaml @@ -0,0 +1,5 @@ +pr: 98847 +summary: "ESQL: Add `CEIL` function" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/98864.yaml b/docs/changelog/98864.yaml new file mode 100644 index 0000000000000..52f5b1b0ad70a --- /dev/null +++ b/docs/changelog/98864.yaml @@ -0,0 +1,5 @@ +pr: 98864 +summary: "[Profiling] Abort index creation on outdated index" +area: Application +type: bug +issues: [] diff --git a/docs/changelog/98878.yaml b/docs/changelog/98878.yaml new file mode 100644 index 0000000000000..4fa8b23851bf9 --- /dev/null +++ b/docs/changelog/98878.yaml @@ -0,0 +1,5 @@ +pr: 98878 +summary: Fix percolator query for stored queries that expand on wildcard field names +area: Percolator +type: bug +issues: [] diff --git a/docs/changelog/98942.yaml b/docs/changelog/98942.yaml new file mode 100644 index 0000000000000..4d8eeee5192e5 --- /dev/null +++ b/docs/changelog/98942.yaml @@ -0,0 +1,5 @@ +pr: 98942 +summary: "ESQL: LEFT function" +area: ES|QL +type: feature +issues: [] diff --git a/docs/reference/esql/esql-functions.asciidoc b/docs/reference/esql/esql-functions.asciidoc index cd9cfd7646ae7..49b0be636d0a0 100644 --- a/docs/reference/esql/esql-functions.asciidoc +++ b/docs/reference/esql/esql-functions.asciidoc @@ -15,6 +15,7 @@ these functions: * <> * <> * <> +* <> * <> * <> * <> @@ -52,6 +53,7 @@ these functions: * <> * <> * <> +* <> * <> * <> * <> @@ -75,6 +77,7 @@ include::functions/atan.asciidoc[] include::functions/atan2.asciidoc[] include::functions/auto_bucket.asciidoc[] include::functions/case.asciidoc[] +include::functions/ceil.asciidoc[] include::functions/cidr_match.asciidoc[] include::functions/coalesce.asciidoc[] include::functions/concat.asciidoc[] @@ -113,6 +116,7 @@ include::functions/split.asciidoc[] include::functions/sqrt.asciidoc[] include::functions/starts_with.asciidoc[] include::functions/substring.asciidoc[] +include::functions/left.asciidoc[] include::functions/tan.asciidoc[] include::functions/tanh.asciidoc[] include::functions/tau.asciidoc[] diff --git a/docs/reference/esql/functions/acos.asciidoc b/docs/reference/esql/functions/acos.asciidoc index 383e4224a0e1b..1fc64c05637c5 100644 --- a/docs/reference/esql/functions/acos.asciidoc +++ b/docs/reference/esql/functions/acos.asciidoc @@ -1,5 +1,8 @@ [[esql-acos]] === `ACOS` +[.text-center] +image::esql/functions/signature/acos.svg[Embedded,opts=inline] + Inverse https://en.wikipedia.org/wiki/Inverse_trigonometric_functions[cosine] trigonometric function. 
[source.merge.styled,esql] @@ -10,3 +13,7 @@ include::{esql-specs}/floats.csv-spec[tag=acos] |=== include::{esql-specs}/floats.csv-spec[tag=acos-result] |=== + +Supported types: + +include::types/acos.asciidoc[] diff --git a/docs/reference/esql/functions/asin.asciidoc b/docs/reference/esql/functions/asin.asciidoc index a7ddfde444edd..ed39906a6ea0c 100644 --- a/docs/reference/esql/functions/asin.asciidoc +++ b/docs/reference/esql/functions/asin.asciidoc @@ -1,5 +1,8 @@ [[esql-asin]] === `ASIN` +[.text-center] +image::esql/functions/signature/asin.svg[Embedded,opts=inline] + Inverse https://en.wikipedia.org/wiki/Inverse_trigonometric_functions[sine] trigonometric function. [source.merge.styled,esql] @@ -10,3 +13,7 @@ include::{esql-specs}/floats.csv-spec[tag=asin] |=== include::{esql-specs}/floats.csv-spec[tag=asin-result] |=== + +Supported types: + +include::types/asin.asciidoc[] diff --git a/docs/reference/esql/functions/atan.asciidoc b/docs/reference/esql/functions/atan.asciidoc index cda085ec8eb68..db5069bdd8010 100644 --- a/docs/reference/esql/functions/atan.asciidoc +++ b/docs/reference/esql/functions/atan.asciidoc @@ -1,5 +1,8 @@ [[esql-atan]] === `ATAN` +[.text-center] +image::esql/functions/signature/atan.svg[Embedded,opts=inline] + Inverse https://en.wikipedia.org/wiki/Inverse_trigonometric_functions[tangent] trigonometric function. [source.merge.styled,esql] @@ -10,3 +13,7 @@ include::{esql-specs}/floats.csv-spec[tag=atan] |=== include::{esql-specs}/floats.csv-spec[tag=atan-result] |=== + +Supported types: + +include::types/atan.asciidoc[] diff --git a/docs/reference/esql/functions/atan2.asciidoc b/docs/reference/esql/functions/atan2.asciidoc index 47dee88ddc740..efaf82bc483f0 100644 --- a/docs/reference/esql/functions/atan2.asciidoc +++ b/docs/reference/esql/functions/atan2.asciidoc @@ -1,5 +1,7 @@ [[esql-atan2]] === `ATAN2` +[.text-center] +image::esql/functions/signature/atan2.svg[Embedded,opts=inline] The https://en.wikipedia.org/wiki/Atan2[angle] between the positive x-axis and the ray from the origin to the point (x , y) in the Cartesian plane. @@ -12,3 +14,7 @@ include::{esql-specs}/floats.csv-spec[tag=atan2] |=== include::{esql-specs}/floats.csv-spec[tag=atan2-result] |=== + +Supported types: + +include::types/atan2.asciidoc[] diff --git a/docs/reference/esql/functions/ceil.asciidoc b/docs/reference/esql/functions/ceil.asciidoc new file mode 100644 index 0000000000000..b35ab6d68b4e3 --- /dev/null +++ b/docs/reference/esql/functions/ceil.asciidoc @@ -0,0 +1,23 @@ +[[esql-ceil]] +=== `CEIL` +[.text-center] +image::esql/functions/signature/ceil.svg[Embedded,opts=inline] + +Round a number up to the nearest integer. + +[source.merge.styled,esql] +---- +include::{esql-specs}/math.csv-spec[tag=ceil] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/math.csv-spec[tag=ceil-result] +|=== + +NOTE: This is a noop for `long` (including unsigned) and `integer`. + For `double` this picks the closest `double` value to the integer, a la + {javadoc}/java.base/java/lang/Math.html#ceil(double)[Math.ceil].
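+
+As an illustrative sketch (the values here are invented for this page, not
+taken from the spec files), a query using `CEIL` might look like:
+
+[source,esql]
+----
+// illustrative values only
+ROW d = 4.2
+| EVAL c = CEIL(d)
+----
+
+which returns `c = 5.0`, the closest `double` value to the integer 5.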
+ +Supported types: + +include::types/ceil.asciidoc[] diff --git a/docs/reference/esql/functions/cos.asciidoc b/docs/reference/esql/functions/cos.asciidoc index 39d2564dd7d73..bef12ba54f890 100644 --- a/docs/reference/esql/functions/cos.asciidoc +++ b/docs/reference/esql/functions/cos.asciidoc @@ -1,5 +1,8 @@ [[esql-cos]] === `COS` +[.text-center] +image::esql/functions/signature/cos.svg[Embedded,opts=inline] + https://en.wikipedia.org/wiki/Sine_and_cosine[Cosine] trigonometric function. [source.merge.styled,esql] @@ -10,3 +13,7 @@ include::{esql-specs}/floats.csv-spec[tag=cos] |=== include::{esql-specs}/floats.csv-spec[tag=cos-result] |=== + +Supported types: + +include::types/cos.asciidoc[] diff --git a/docs/reference/esql/functions/e.asciidoc b/docs/reference/esql/functions/e.asciidoc index a9d2f3fa340e0..21e947bf4749d 100644 --- a/docs/reference/esql/functions/e.asciidoc +++ b/docs/reference/esql/functions/e.asciidoc @@ -1,5 +1,8 @@ [[esql-e]] === `E` +[.text-center] +image::esql/functions/signature/e.svg[Embedded,opts=inline] + {wikipedia}/E_(mathematical_constant)[Euler's number]. [source.merge.styled,esql] diff --git a/docs/reference/esql/functions/floor.asciidoc b/docs/reference/esql/functions/floor.asciidoc index 595e60e98a6d2..d997e35f71dad 100644 --- a/docs/reference/esql/functions/floor.asciidoc +++ b/docs/reference/esql/functions/floor.asciidoc @@ -1,5 +1,8 @@ [[esql-floor]] === `FLOOR` +[.text-center] +image::esql/functions/signature/floor.svg[Embedded,opts=inline] + Round a number down to the nearest integer. [source.merge.styled,esql] @@ -11,6 +14,10 @@ include::{esql-specs}/math.csv-spec[tag=floor] |=== include::{esql-specs}/math.csv-spec[tag=floor-result] |=== -NOTE: This is a noop for `long` and `integer`. For `double` this picks the - the closest `double` value to the integer ala +NOTE: This is a noop for `long` (including unsigned) and `integer`. + For `double` this picks the closest `double` value to the integer, a la {javadoc}/java.base/java/lang/Math.html#floor(double)[Math.floor]. + +Supported types: + +include::types/floor.asciidoc[] diff --git a/docs/reference/esql/functions/greatest.asciidoc b/docs/reference/esql/functions/greatest.asciidoc index e4eaedd31289d..9c192662dcaaa 100644 --- a/docs/reference/esql/functions/greatest.asciidoc +++ b/docs/reference/esql/functions/greatest.asciidoc @@ -18,3 +18,7 @@ include::{esql-specs}/math.csv-spec[tag=greatest-result] NOTE: When run on `keyword` or `text` fields, this'll return the last string in alphabetical order. When run on `boolean` columns this will return `true` if any values are `true`. + +Supported types: + +include::types/greatest.asciidoc[] diff --git a/docs/reference/esql/functions/least.asciidoc b/docs/reference/esql/functions/least.asciidoc index 366ddb6deb1e0..8c702246fe5e3 100644 --- a/docs/reference/esql/functions/least.asciidoc +++ b/docs/reference/esql/functions/least.asciidoc @@ -1,5 +1,7 @@ [[esql-least]] === `LEAST` +[.text-center] +image::esql/functions/signature/least.svg[Embedded,opts=inline] Returns the minimum value from many columns. This is similar to <> except it's intended to run on multiple columns at once. @@ -16,3 +18,7 @@ include::{esql-specs}/math.csv-spec[tag=least-result] NOTE: When run on `keyword` or `text` fields, this'll return the first string in alphabetical order. When run on `boolean` columns this will return `false` if any values are `false`.
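+
+As an illustrative sketch (the values here are invented for this page, not
+taken from the spec files), a query running `LEAST` over two columns might
+look like:
+
+[source,esql]
+----
+// illustrative values only
+ROW a = 10, b = 20
+| EVAL l = LEAST(a, b)
+----
+
+which returns `l = 10`.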
+ +Supported types: + +include::types/least.asciidoc[] diff --git a/docs/reference/esql/functions/left.asciidoc b/docs/reference/esql/functions/left.asciidoc new file mode 100644 index 0000000000000..42537d9560ebf --- /dev/null +++ b/docs/reference/esql/functions/left.asciidoc @@ -0,0 +1,14 @@ +[[esql-left]] +=== `LEFT` + +Returns the substring that extracts 'length' chars from 'string', +starting from the left. + +[source.merge.styled,esql] +---- +include::{esql-specs}/string.csv-spec[tag=left] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/string.csv-spec[tag=left-result] +|=== diff --git a/docs/reference/esql/functions/log10.asciidoc b/docs/reference/esql/functions/log10.asciidoc index ee19d5a61d1fa..08a6fb91041c7 100644 --- a/docs/reference/esql/functions/log10.asciidoc +++ b/docs/reference/esql/functions/log10.asciidoc @@ -1,5 +1,8 @@ [[esql-log10]] === `LOG10` +[.text-center] +image::esql/functions/signature/log10.svg[Embedded,opts=inline] + Returns the log base 10. The input can be any numeric value, the return value is always a double. @@ -13,3 +16,7 @@ include::{esql-specs}/math.csv-spec[tag=log10] |=== include::{esql-specs}/math.csv-spec[tag=log10-result] |=== + +Supported types: + +include::types/log10.asciidoc[] diff --git a/docs/reference/esql/functions/pi.asciidoc b/docs/reference/esql/functions/pi.asciidoc index 631018fed0055..75e9767d98a33 100644 --- a/docs/reference/esql/functions/pi.asciidoc +++ b/docs/reference/esql/functions/pi.asciidoc @@ -1,5 +1,8 @@ [[esql-pi]] === `PI` +[.text-center] +image::esql/functions/signature/pi.svg[Embedded,opts=inline] + The {wikipedia}/Pi[ratio] of a circle's circumference to its diameter. [source.merge.styled,esql] diff --git a/docs/reference/esql/functions/signature/acos.svg b/docs/reference/esql/functions/signature/acos.svg index 03131b22b78f5..6a2e2c04cd20e 100644 --- a/docs/reference/esql/functions/signature/acos.svg +++ b/docs/reference/esql/functions/signature/acos.svg @@ -1 +1 @@ -ACOS(arg1) \ No newline at end of file +ACOS(n) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/asin.svg b/docs/reference/esql/functions/signature/asin.svg index 762533569d089..9792e7316b138 100644 --- a/docs/reference/esql/functions/signature/asin.svg +++ b/docs/reference/esql/functions/signature/asin.svg @@ -1 +1 @@ -ASIN(arg1) \ No newline at end of file +ASIN(n) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/atan.svg b/docs/reference/esql/functions/signature/atan.svg index d3bc94c779e79..184956ff2e126 100644 --- a/docs/reference/esql/functions/signature/atan.svg +++ b/docs/reference/esql/functions/signature/atan.svg @@ -1 +1 @@ -ATAN(arg1) \ No newline at end of file +ATAN(n) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/atan2.svg b/docs/reference/esql/functions/signature/atan2.svg index 1ded29d4311cc..f2295d3d98f16 100644 --- a/docs/reference/esql/functions/signature/atan2.svg +++ b/docs/reference/esql/functions/signature/atan2.svg @@ -1 +1 @@ -ATAN2(arg1,arg2) \ No newline at end of file +ATAN2(y,x) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/ceil.svg b/docs/reference/esql/functions/signature/ceil.svg new file mode 100644 index 0000000000000..baff44ba0cb70 --- /dev/null +++ b/docs/reference/esql/functions/signature/ceil.svg @@ -0,0 +1 @@ +CEIL(arg1) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/cos.svg
b/docs/reference/esql/functions/signature/cos.svg index 9bcc26aea71d8..f06a24726f71a 100644 --- a/docs/reference/esql/functions/signature/cos.svg +++ b/docs/reference/esql/functions/signature/cos.svg @@ -1 +1 @@ -COS(arg1) \ No newline at end of file +COS(n) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/cosh.svg b/docs/reference/esql/functions/signature/cosh.svg index c6a0483d2241c..54ea9bff84097 100644 --- a/docs/reference/esql/functions/signature/cosh.svg +++ b/docs/reference/esql/functions/signature/cosh.svg @@ -1 +1 @@ -COSH(arg1) \ No newline at end of file +COSH(n) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/floor.svg b/docs/reference/esql/functions/signature/floor.svg index 6b224de1f9e77..7e153548bfd82 100644 --- a/docs/reference/esql/functions/signature/floor.svg +++ b/docs/reference/esql/functions/signature/floor.svg @@ -1 +1 @@ -FLOOR(arg1) \ No newline at end of file +FLOOR(n) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/least.svg b/docs/reference/esql/functions/signature/least.svg index 52507c4c62d4f..ec0ed0efcec62 100644 --- a/docs/reference/esql/functions/signature/least.svg +++ b/docs/reference/esql/functions/signature/least.svg @@ -1 +1 @@ -LEAST(arg1,arg2) \ No newline at end of file +LEAST(first,rest) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/log10.svg b/docs/reference/esql/functions/signature/log10.svg index 62993d668f0bd..50edcf6ea943f 100644 --- a/docs/reference/esql/functions/signature/log10.svg +++ b/docs/reference/esql/functions/signature/log10.svg @@ -1 +1 @@ -LOG10(arg1) \ No newline at end of file +LOG10(n) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/sin.svg b/docs/reference/esql/functions/signature/sin.svg index f22b1bc19a259..eb20f3386d441 100644 --- a/docs/reference/esql/functions/signature/sin.svg +++ b/docs/reference/esql/functions/signature/sin.svg @@ -1 +1 @@ -SIN(arg1) \ No newline at end of file +SIN(n) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/sinh.svg b/docs/reference/esql/functions/signature/sinh.svg index fb21b2d436bf2..30361aca1fb35 100644 --- a/docs/reference/esql/functions/signature/sinh.svg +++ b/docs/reference/esql/functions/signature/sinh.svg @@ -1 +1 @@ -SINH(arg1) \ No newline at end of file +SINH(n) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/sqrt.svg b/docs/reference/esql/functions/signature/sqrt.svg index 55d39a59c8f64..77c657120735f 100644 --- a/docs/reference/esql/functions/signature/sqrt.svg +++ b/docs/reference/esql/functions/signature/sqrt.svg @@ -1 +1 @@ -SQRT(arg1) \ No newline at end of file +SQRT(n) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/tan.svg b/docs/reference/esql/functions/signature/tan.svg index c2ea4c2081a16..8ac6ee37cb52a 100644 --- a/docs/reference/esql/functions/signature/tan.svg +++ b/docs/reference/esql/functions/signature/tan.svg @@ -1 +1 @@ -TAN(arg1) \ No newline at end of file +TAN(n) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/tanh.svg b/docs/reference/esql/functions/signature/tanh.svg index 046e09369b1dc..dfe167afc5470 100644 --- a/docs/reference/esql/functions/signature/tanh.svg +++ b/docs/reference/esql/functions/signature/tanh.svg @@ -1 +1 @@ -TANH(arg1) \ No newline at end of file +TANH(n) \ No newline at end of file diff --git a/docs/reference/esql/functions/sin.asciidoc 
b/docs/reference/esql/functions/sin.asciidoc index 7c02ded0a2f72..a622a6e496ce9 100644 --- a/docs/reference/esql/functions/sin.asciidoc +++ b/docs/reference/esql/functions/sin.asciidoc @@ -1,5 +1,8 @@ [[esql-sin]] === `SIN` +[.text-center] +image::esql/functions/signature/sin.svg[Embedded,opts=inline] + https://en.wikipedia.org/wiki/Sine_and_cosine[Sine] trigonometric function. [source.merge.styled,esql] @@ -10,3 +13,7 @@ include::{esql-specs}/floats.csv-spec[tag=sin] |=== include::{esql-specs}/floats.csv-spec[tag=sin-result] |=== + +Supported types: + +include::types/sin.asciidoc[] diff --git a/docs/reference/esql/functions/sinh.asciidoc b/docs/reference/esql/functions/sinh.asciidoc index 241b4f978349d..054170d3fed27 100644 --- a/docs/reference/esql/functions/sinh.asciidoc +++ b/docs/reference/esql/functions/sinh.asciidoc @@ -1,5 +1,8 @@ [[esql-sinh]] === `SINH` +[.text-center] +image::esql/functions/signature/sinh.svg[Embedded,opts=inline] + https://en.wikipedia.org/wiki/Hyperbolic_functions[Sine] hyperbolic function. [source.merge.styled,esql] @@ -10,3 +13,7 @@ include::{esql-specs}/floats.csv-spec[tag=sinh] |=== include::{esql-specs}/floats.csv-spec[tag=sinh-result] |=== + +Supported types: + +include::types/sinh.asciidoc[] diff --git a/docs/reference/esql/functions/sqrt.asciidoc b/docs/reference/esql/functions/sqrt.asciidoc index 189deefa2cf90..a64dd0d422d15 100644 --- a/docs/reference/esql/functions/sqrt.asciidoc +++ b/docs/reference/esql/functions/sqrt.asciidoc @@ -1,5 +1,8 @@ [[esql-sqrt]] === `SQRT` +[.text-center] +image::esql/functions/signature/sqrt.svg[Embedded,opts=inline] + Returns the square root of a number. The input can be any numeric value, the return value is always a double. @@ -13,3 +16,7 @@ include::{esql-specs}/math.csv-spec[tag=sqrt] |=== include::{esql-specs}/math.csv-spec[tag=sqrt-result] |=== + +Supported types: + +include::types/sqrt.asciidoc[] diff --git a/docs/reference/esql/functions/tan.asciidoc b/docs/reference/esql/functions/tan.asciidoc index fc64317135a44..8d5a58e7555b1 100644 --- a/docs/reference/esql/functions/tan.asciidoc +++ b/docs/reference/esql/functions/tan.asciidoc @@ -1,5 +1,8 @@ [[esql-tan]] === `TAN` +[.text-center] +image::esql/functions/signature/tan.svg[Embedded,opts=inline] + https://en.wikipedia.org/wiki/Sine_and_cosine[Tangent] trigonometric function. [source.merge.styled,esql] @@ -10,3 +13,7 @@ include::{esql-specs}/floats.csv-spec[tag=tan] |=== include::{esql-specs}/floats.csv-spec[tag=tan-result] |=== + +Supported types: + +include::types/tan.asciidoc[] diff --git a/docs/reference/esql/functions/tanh.asciidoc b/docs/reference/esql/functions/tanh.asciidoc index f9fcec10394d6..e5d67d9f19063 100644 --- a/docs/reference/esql/functions/tanh.asciidoc +++ b/docs/reference/esql/functions/tanh.asciidoc @@ -1,5 +1,8 @@ [[esql-tanh]] === `TANH` +[.text-center] +image::esql/functions/signature/tanh.svg[Embedded,opts=inline] + https://en.wikipedia.org/wiki/Hyperbolic_functions[Tangent] hyperbolic function. 
[source.merge.styled,esql] @@ -10,3 +13,7 @@ include::{esql-specs}/floats.csv-spec[tag=tanh] |=== include::{esql-specs}/floats.csv-spec[tag=tanh-result] |=== + +Supported types: + +include::types/tanh.asciidoc[] diff --git a/docs/reference/esql/functions/tau.asciidoc b/docs/reference/esql/functions/tau.asciidoc index f2891baf73db6..c35d07fe74642 100644 --- a/docs/reference/esql/functions/tau.asciidoc +++ b/docs/reference/esql/functions/tau.asciidoc @@ -1,5 +1,8 @@ [[esql-tau]] === `TAU` +[.text-center] +image::esql/functions/signature/tau.svg[Embedded,opts=inline] + The https://tauday.com/tau-manifesto[ratio] of a circle's circumference to its radius. [source.merge.styled,esql] diff --git a/docs/reference/esql/functions/types/acos.asciidoc b/docs/reference/esql/functions/types/acos.asciidoc index dd4f6b0725cc8..1df8dd6526f18 100644 --- a/docs/reference/esql/functions/types/acos.asciidoc +++ b/docs/reference/esql/functions/types/acos.asciidoc @@ -1,6 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | result +n | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/asin.asciidoc b/docs/reference/esql/functions/types/asin.asciidoc index dd4f6b0725cc8..1df8dd6526f18 100644 --- a/docs/reference/esql/functions/types/asin.asciidoc +++ b/docs/reference/esql/functions/types/asin.asciidoc @@ -1,6 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | result +n | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/atan.asciidoc b/docs/reference/esql/functions/types/atan.asciidoc index dd4f6b0725cc8..1df8dd6526f18 100644 --- a/docs/reference/esql/functions/types/atan.asciidoc +++ b/docs/reference/esql/functions/types/atan.asciidoc @@ -1,6 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | result +n | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/atan2.asciidoc b/docs/reference/esql/functions/types/atan2.asciidoc index 3b01caac0e4ee..74fffe9056a16 100644 --- a/docs/reference/esql/functions/types/atan2.asciidoc +++ b/docs/reference/esql/functions/types/atan2.asciidoc @@ -1,6 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | arg2 | result +y | x | result double | double | double double | integer | double double | long | double diff --git a/docs/reference/esql/functions/types/case.asciidoc b/docs/reference/esql/functions/types/case.asciidoc index fed63b005dbc7..269265c5638e1 100644 --- a/docs/reference/esql/functions/types/case.asciidoc +++ b/docs/reference/esql/functions/types/case.asciidoc @@ -1,5 +1,5 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== arg1 | arg2... 
| result -boolean | keyword | keyword | keyword + |=== diff --git a/docs/reference/esql/functions/types/ceil.asciidoc b/docs/reference/esql/functions/types/ceil.asciidoc new file mode 100644 index 0000000000000..f1831429aa95c --- /dev/null +++ b/docs/reference/esql/functions/types/ceil.asciidoc @@ -0,0 +1,8 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +arg1 | result +double | double +integer | integer +long | long +unsigned_long | unsigned_long +|=== diff --git a/docs/reference/esql/functions/types/coalesce.asciidoc b/docs/reference/esql/functions/types/coalesce.asciidoc index e1ef2d3d60560..cfb032571c5d3 100644 --- a/docs/reference/esql/functions/types/coalesce.asciidoc +++ b/docs/reference/esql/functions/types/coalesce.asciidoc @@ -1,49 +1,9 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== arg1 | arg2... | result -boolean | boolean boolean | boolean | boolean -boolean | boolean | boolean | boolean -boolean | boolean | boolean | boolean | boolean -boolean | boolean | boolean | boolean | boolean | boolean -boolean | boolean | boolean | boolean | boolean | boolean | boolean -boolean | boolean | boolean | boolean | boolean | boolean | boolean | boolean -boolean | boolean | boolean | boolean | boolean | boolean | boolean | boolean | boolean -boolean | boolean | boolean | boolean | boolean | boolean | boolean | boolean | boolean | boolean -integer | integer integer | integer | integer -integer | integer | integer | integer -integer | integer | integer | integer | integer -integer | integer | integer | integer | integer | integer -integer | integer | integer | integer | integer | integer | integer -integer | integer | integer | integer | integer | integer | integer | integer -integer | integer | integer | integer | integer | integer | integer | integer | integer -integer | integer | integer | integer | integer | integer | integer | integer | integer | integer -keyword | keyword keyword | keyword | keyword -keyword | keyword | keyword | keyword -keyword | keyword | keyword | keyword | keyword -keyword | keyword | keyword | keyword | keyword | keyword -keyword | keyword | keyword | keyword | keyword | keyword | keyword -keyword | keyword | keyword | keyword | keyword | keyword | keyword | keyword -keyword | keyword | keyword | keyword | keyword | keyword | keyword | keyword | keyword -keyword | keyword | keyword | keyword | keyword | keyword | keyword | keyword | keyword | keyword -long | long long | long | long -long | long | long | long -long | long | long | long | long -long | long | long | long | long | long -long | long | long | long | long | long | long -long | long | long | long | long | long | long | long -long | long | long | long | long | long | long | long | long -long | long | long | long | long | long | long | long | long | long -text | text text | text | text -text | text | text | text -text | text | text | text | text -text | text | text | text | text | text -text | text | text | text | text | text | text -text | text | text | text | text | text | text | text -text | text | text | text | text | text | text | text | text -text | text | text | text | text | text | text | text | text | text |=== diff --git a/docs/reference/esql/functions/types/cos.asciidoc b/docs/reference/esql/functions/types/cos.asciidoc index dd4f6b0725cc8..1df8dd6526f18 100644 --- a/docs/reference/esql/functions/types/cos.asciidoc +++ b/docs/reference/esql/functions/types/cos.asciidoc @@ -1,6 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | result +n | result double 
| double integer | double long | double diff --git a/docs/reference/esql/functions/types/cosh.asciidoc b/docs/reference/esql/functions/types/cosh.asciidoc index dd4f6b0725cc8..1df8dd6526f18 100644 --- a/docs/reference/esql/functions/types/cosh.asciidoc +++ b/docs/reference/esql/functions/types/cosh.asciidoc @@ -1,6 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | result +n | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/e.asciidoc b/docs/reference/esql/functions/types/e.asciidoc index ddb78d7b651fb..5854465d5fb49 100644 --- a/docs/reference/esql/functions/types/e.asciidoc +++ b/docs/reference/esql/functions/types/e.asciidoc @@ -1,5 +1,5 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== result -integer | double + |=== diff --git a/docs/reference/esql/functions/types/floor.asciidoc b/docs/reference/esql/functions/types/floor.asciidoc index 09cb78511d275..54341360fed3f 100644 --- a/docs/reference/esql/functions/types/floor.asciidoc +++ b/docs/reference/esql/functions/types/floor.asciidoc @@ -1,5 +1,8 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | result +n | result double | double +integer | integer +long | long +unsigned_long | unsigned_long |=== diff --git a/docs/reference/esql/functions/types/greatest.asciidoc b/docs/reference/esql/functions/types/greatest.asciidoc index d8dc24ced9169..0ce6e8148d1a0 100644 --- a/docs/reference/esql/functions/types/greatest.asciidoc +++ b/docs/reference/esql/functions/types/greatest.asciidoc @@ -1,49 +1,9 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== first | rest... | result -boolean | boolean boolean | boolean | boolean -boolean | boolean | boolean | boolean -boolean | boolean | boolean | boolean | boolean -boolean | boolean | boolean | boolean | boolean | boolean -boolean | boolean | boolean | boolean | boolean | boolean | boolean -boolean | boolean | boolean | boolean | boolean | boolean | boolean | boolean -boolean | boolean | boolean | boolean | boolean | boolean | boolean | boolean | boolean -boolean | boolean | boolean | boolean | boolean | boolean | boolean | boolean | boolean | boolean -integer | integer integer | integer | integer -integer | integer | integer | integer -integer | integer | integer | integer | integer -integer | integer | integer | integer | integer | integer -integer | integer | integer | integer | integer | integer | integer -integer | integer | integer | integer | integer | integer | integer | integer -integer | integer | integer | integer | integer | integer | integer | integer | integer -integer | integer | integer | integer | integer | integer | integer | integer | integer | integer -keyword | keyword keyword | keyword | keyword -keyword | keyword | keyword | keyword -keyword | keyword | keyword | keyword | keyword -keyword | keyword | keyword | keyword | keyword | keyword -keyword | keyword | keyword | keyword | keyword | keyword | keyword -keyword | keyword | keyword | keyword | keyword | keyword | keyword | keyword -keyword | keyword | keyword | keyword | keyword | keyword | keyword | keyword | keyword -keyword | keyword | keyword | keyword | keyword | keyword | keyword | keyword | keyword | keyword -long | long long | long | long -long | long | long | long -long | long | long | long | long -long | long | long | long | long | long -long | long | long | long | long | long | long -long | long | long | long | long | long | long | long -long | long | long | long | long | long | long | long | long -long | 
long | long | long | long | long | long | long | long | long -text | text text | text | text -text | text | text | text -text | text | text | text | text -text | text | text | text | text | text -text | text | text | text | text | text | text -text | text | text | text | text | text | text | text -text | text | text | text | text | text | text | text | text -text | text | text | text | text | text | text | text | text | text |=== diff --git a/docs/reference/esql/functions/types/least.asciidoc b/docs/reference/esql/functions/types/least.asciidoc index e1ef2d3d60560..0ce6e8148d1a0 100644 --- a/docs/reference/esql/functions/types/least.asciidoc +++ b/docs/reference/esql/functions/types/least.asciidoc @@ -1,49 +1,9 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | arg2... | result -boolean | boolean +first | rest... | result boolean | boolean | boolean -boolean | boolean | boolean | boolean -boolean | boolean | boolean | boolean | boolean -boolean | boolean | boolean | boolean | boolean | boolean -boolean | boolean | boolean | boolean | boolean | boolean | boolean -boolean | boolean | boolean | boolean | boolean | boolean | boolean | boolean -boolean | boolean | boolean | boolean | boolean | boolean | boolean | boolean | boolean -boolean | boolean | boolean | boolean | boolean | boolean | boolean | boolean | boolean | boolean -integer | integer integer | integer | integer -integer | integer | integer | integer -integer | integer | integer | integer | integer -integer | integer | integer | integer | integer | integer -integer | integer | integer | integer | integer | integer | integer -integer | integer | integer | integer | integer | integer | integer | integer -integer | integer | integer | integer | integer | integer | integer | integer | integer -integer | integer | integer | integer | integer | integer | integer | integer | integer | integer -keyword | keyword keyword | keyword | keyword -keyword | keyword | keyword | keyword -keyword | keyword | keyword | keyword | keyword -keyword | keyword | keyword | keyword | keyword | keyword -keyword | keyword | keyword | keyword | keyword | keyword | keyword -keyword | keyword | keyword | keyword | keyword | keyword | keyword | keyword -keyword | keyword | keyword | keyword | keyword | keyword | keyword | keyword | keyword -keyword | keyword | keyword | keyword | keyword | keyword | keyword | keyword | keyword | keyword -long | long long | long | long -long | long | long | long -long | long | long | long | long -long | long | long | long | long | long -long | long | long | long | long | long | long -long | long | long | long | long | long | long | long -long | long | long | long | long | long | long | long | long -long | long | long | long | long | long | long | long | long | long -text | text text | text | text -text | text | text | text -text | text | text | text | text -text | text | text | text | text | text -text | text | text | text | text | text | text -text | text | text | text | text | text | text | text -text | text | text | text | text | text | text | text | text -text | text | text | text | text | text | text | text | text | text |=== diff --git a/docs/reference/esql/functions/types/log10.asciidoc b/docs/reference/esql/functions/types/log10.asciidoc index 09cb78511d275..1df8dd6526f18 100644 --- a/docs/reference/esql/functions/types/log10.asciidoc +++ b/docs/reference/esql/functions/types/log10.asciidoc @@ -1,5 +1,8 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | result +n | result double | double 
+integer | double +long | double +unsigned_long | double |=== diff --git a/docs/reference/esql/functions/types/pi.asciidoc b/docs/reference/esql/functions/types/pi.asciidoc index ddb78d7b651fb..5854465d5fb49 100644 --- a/docs/reference/esql/functions/types/pi.asciidoc +++ b/docs/reference/esql/functions/types/pi.asciidoc @@ -1,5 +1,5 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== result -integer | double + |=== diff --git a/docs/reference/esql/functions/types/sin.asciidoc b/docs/reference/esql/functions/types/sin.asciidoc index dd4f6b0725cc8..1df8dd6526f18 100644 --- a/docs/reference/esql/functions/types/sin.asciidoc +++ b/docs/reference/esql/functions/types/sin.asciidoc @@ -1,6 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | result +n | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/sinh.asciidoc b/docs/reference/esql/functions/types/sinh.asciidoc index dd4f6b0725cc8..1df8dd6526f18 100644 --- a/docs/reference/esql/functions/types/sinh.asciidoc +++ b/docs/reference/esql/functions/types/sinh.asciidoc @@ -1,6 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | result +n | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/sqrt.asciidoc b/docs/reference/esql/functions/types/sqrt.asciidoc index 09cb78511d275..1df8dd6526f18 100644 --- a/docs/reference/esql/functions/types/sqrt.asciidoc +++ b/docs/reference/esql/functions/types/sqrt.asciidoc @@ -1,5 +1,8 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | result +n | result double | double +integer | double +long | double +unsigned_long | double |=== diff --git a/docs/reference/esql/functions/types/tan.asciidoc b/docs/reference/esql/functions/types/tan.asciidoc index dd4f6b0725cc8..1df8dd6526f18 100644 --- a/docs/reference/esql/functions/types/tan.asciidoc +++ b/docs/reference/esql/functions/types/tan.asciidoc @@ -1,6 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | result +n | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/tanh.asciidoc b/docs/reference/esql/functions/types/tanh.asciidoc index dd4f6b0725cc8..1df8dd6526f18 100644 --- a/docs/reference/esql/functions/types/tanh.asciidoc +++ b/docs/reference/esql/functions/types/tanh.asciidoc @@ -1,6 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | result +n | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/tau.asciidoc b/docs/reference/esql/functions/types/tau.asciidoc index ddb78d7b651fb..5854465d5fb49 100644 --- a/docs/reference/esql/functions/types/tau.asciidoc +++ b/docs/reference/esql/functions/types/tau.asciidoc @@ -1,5 +1,5 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== result -integer | double + |=== diff --git a/docs/reference/setup/install/auto-config-output.asciidoc b/docs/reference/setup/install/auto-config-output.asciidoc deleted file mode 100644 index 99fc097646f00..0000000000000 --- a/docs/reference/setup/install/auto-config-output.asciidoc +++ /dev/null @@ -1,23 +0,0 @@ -[role="exclude"] - -["source","sh",subs="attributes"] ----- -The generated password for the elastic built-in superuser is: - - -The enrollment token for Kibana instances, valid for the next 30 minutes: - - -The hex-encoded SHA-256 fingerprint of the generated HTTPS CA DER-encoded certificate: - - -You can complete the following actions at any time: -Reset the password of 
the elastic built-in superuser with -'bin{slash}elasticsearch-reset-password -u elastic'. - -Generate an enrollment token for Kibana instances with -'bin{slash}elasticsearch-create-enrollment-token -s kibana'. - -Generate an enrollment token for Elasticsearch nodes with -'bin{slash}elasticsearch-create-enrollment-token -s node'. ----- \ No newline at end of file diff --git a/docs/reference/setup/install/check-running.asciidoc b/docs/reference/setup/install/check-running.asciidoc index 726de3ed9a0e4..ecbb9895e6c34 100644 --- a/docs/reference/setup/install/check-running.asciidoc +++ b/docs/reference/setup/install/check-running.asciidoc @@ -5,7 +5,7 @@ You can test that your {es} node is running by sending an HTTPS request to port ["source","sh",subs="attributes"] ---- -curl --cacert {es-conf}{slash}certs{slash}http_ca.crt -u elastic https://localhost:9200 <1> +curl --cacert {es-conf}{slash}certs{slash}http_ca.crt -u elastic:$ELASTIC_PASSWORD https://localhost:9200 <1> ---- // NOTCONSOLE <1> Ensure that you use `https` in your call, or the request will fail. @@ -13,8 +13,7 @@ curl --cacert {es-conf}{slash}certs{slash}http_ca.crt -u elastic https://localho `--cacert`:: Path to the generated `http_ca.crt` certificate for the HTTP layer. -Enter the password for the `elastic` user that was generated during -installation, which should return a response like this: +The call returns a response like this: //// The following hidden request is required before the response. Otherwise, you'll diff --git a/docs/reference/setup/install/docker.asciidoc b/docs/reference/setup/install/docker.asciidoc index 9867449fedd53..0abba259d46eb 100644 --- a/docs/reference/setup/install/docker.asciidoc +++ b/docs/reference/setup/install/docker.asciidoc @@ -1,10 +1,9 @@ [[docker]] === Install {es} with Docker -{es} is also available as Docker images. A list of all published Docker -images and tags is available at -https://www.docker.elastic.co[www.docker.elastic.co]. The source files are -in +{es} is available as a Docker image. A list of all published Docker images and +tags is available at https://www.docker.elastic.co[www.docker.elastic.co]. The +source files are in https://github.com/elastic/elasticsearch/blob/{branch}/distribution/docker[Github]. include::license.asciidoc[] @@ -13,19 +12,19 @@ Starting in {es} 8.0, security is enabled by default. With security enabled, {stack} {security-features} require TLS encryption for the transport networking layer, or your cluster will fail to start. -==== Install Docker Desktop or Docker Engine +==== Install Docker -Install the appropriate https://docs.docker.com/get-docker/[Docker application] -for your operating system. +Visit https://docs.docker.com/get-docker/[Get Docker] to install Docker for your +environment. -NOTE: Make sure that Docker is allotted at least 4GiB of memory. In Docker -Desktop, you configure resource usage on the Advanced tab in Preference (macOS) -or Settings (Windows). +IMPORTANT: If using Docker Desktop, make sure to allocate at least 4GB of +memory. You can adjust memory usage in Docker Desktop by going to **Settings > +Resources**. -==== Pull the {es} Docker image +==== Pull the Docker image -Obtaining {es} for Docker is as simple as issuing a `docker pull` command -against the Elastic Docker registry. +Use the `docker pull` command to pull the {es} image from the Elastic Docker +registry.
ifeval::["{release-state}"=="unreleased"] @@ -44,12 +43,11 @@ docker pull {docker-repo}:{version} endif::[] [[docker-verify-signature]] -==== Optional: Verify the {es} Docker image signature +==== Optional: Verify the image signature -Although it's optional, we highly recommend verifying the signatures included with your downloaded Docker images to ensure that the images are valid. +Verify the signatures included in your {es} Docker images to ensure they're valid. Elastic images are signed with https://docs.sigstore.dev/cosign/overview/[Cosign] which is part of the https://www.sigstore.dev/[Sigstore] project. - Cosign supports container signing, verification, and storage in an OCI registry. ifeval::["{release-state}"=="unreleased"] @@ -76,7 +74,7 @@ cosign verify --key cosign.pub {docker-repo}:{version} <2> The command prints the check results and the signature payload in JSON format: -[source,sh] +[source,sh,subs="attributes"] -------------------------------------------- Verification for docker.elastic.co/elasticsearch/elasticsearch:{version} -- The following checks were performed on each of these signatures: @@ -87,47 +85,35 @@ The following checks were performed on each of these signatures: endif::[] -Now that you have verified the {es} Docker image signature, you can start a -<> or <> -cluster. [[docker-cli-run-dev-mode]] -==== Start a single-node cluster with Docker +==== Run {es} in Docker -ifeval::["{release-state}"=="unreleased"] +Use Docker commands to start a single-node {es} cluster for development or +testing. You can then run additional Docker commands to add nodes to the test +cluster. -WARNING: Version {version} of the {es} Docker image has not yet been released. +TIP: This setup doesn't run multiple {es} nodes or {kib} by default. To create a +multi-node cluster with {kib}, use Docker Compose instead. See +<>. -endif::[] -If you're starting a single-node {es} cluster in a Docker container, security -will be automatically enabled and configured for you. When you start {es} for -the first time, the following security configuration occurs automatically: +===== Start a single-node cluster -* <> are generated -for the transport and HTTP layers. -* The Transport Layer Security (TLS) configuration settings are written to -`elasticsearch.yml`. -* A password is generated for the `elastic` user. -* An enrollment token is generated for {kib}. +ifeval::["{release-state}"=="unreleased"] -You can then {kibana-ref}/docker.html[start {kib}] and enter the enrollment -token, which is valid for 30 minutes. This token automatically applies the -security settings from your {es} cluster, authenticates to {es} with the -`kibana_system` user, and writes the security configuration to `kibana.yml`. +WARNING: Version {version} of the {es} Docker image has not yet been released. -The following commands start a single-node {es} cluster for development or -testing. +endif::[] -. Create a new docker network for {es} and {kib} +. Create a new docker network. + [source,sh] ---- docker network create elastic ---- -. Start {es} in Docker. A password is generated for the `elastic` user and -output to the terminal, plus an enrollment token for enrolling {kib}. +. Start an {es} container. 
+ -- ifeval::["{release-state}"=="unreleased"] @@ -140,78 +126,64 @@ endif::[] ifeval::["{release-state}"!="unreleased"] [source,sh,subs="attributes"] ---- -docker run --name es01 --net elastic -p 9200:9200 -it {docker-image} +docker run --name es01 --net elastic -p 9200:9200 -it -m 1GB {docker-image} ---- +TIP: Use the `-m` flag to set a memory limit for the container. + +The command prints the `elastic` user password and an enrollment token for {kib}. + endif::[] -- -+ -TIP: You might need to scroll back a bit in the terminal to view the password -and enrollment token. -. Copy the generated password and enrollment token and save them in a secure -location. These values are shown only when you start {es} for the first time. +. Copy the generated `elastic` password and enrollment token. These credentials +are only shown when you start {es} for the first time. You can regenerate the +credentials using the following commands. + -[NOTE] -==== -If you need to reset the password for the `elastic` user or other -built-in users, run the <> tool. -This tool is available in the {es} `/bin` directory of the Docker container. -For example: +-- +[source,sh,subs="attributes"] +---- +docker exec -it es01 /usr/share/elasticsearch/bin/elasticsearch-reset-password +docker exec -it es01 /usr/share/elasticsearch/bin/elasticsearch-create-enrollment-token -s kibana +---- + +We recommend storing the `elastic` password as an environment variable in your shell. Example: [source,sh] ---- -docker exec -it es01 /usr/share/elasticsearch/bin/elasticsearch-reset-password +export ELASTIC_PASSWORD="your_password" ---- -==== +-- -. Copy the `http_ca.crt` security certificate from your Docker container to -your local machine. +. Copy the `http_ca.crt` SSL certificate from the container to your local machine. + [source,sh] ---- docker cp es01:/usr/share/elasticsearch/config/certs/http_ca.crt . ---- -. Open a new terminal and verify that you can connect to your {es} cluster by -making an authenticated call, using the `http_ca.crt` file that you copied from -your Docker container. Enter the password for the `elastic` user when prompted. +. Make a REST API call to {es} to ensure the {es} container is running. + [source,sh] ---- -curl --cacert http_ca.crt -u elastic https://localhost:9200 +curl --cacert http_ca.crt -u elastic:$ELASTIC_PASSWORD https://localhost:9200 ---- // NOTCONSOLE -==== Enroll additional nodes - -When you start {es} for the first time, the installation process configures a single-node cluster by default. This process also generates an enrollment token -and prints it to your terminal. If you want a node to join an existing cluster, -start the new node with the generated enrollment token. +===== Add more nodes +. Use an existing node to generate an enrollment token for the new node. ++ -- -.Generating enrollment tokens -**** -The enrollment token is valid for 30 minutes. If you need to generate a -new enrollment token, run the -<> tool on your -existing node. This tool is available in the {es} `bin` directory of the Docker -container. - -For example, run the following command on the existing `es01` node to -generate an enrollment token for new {es} nodes: - [source,sh] ---- docker exec -it es01 /usr/share/elasticsearch/bin/elasticsearch-create-enrollment-token -s node ---- -**** --- -. In the terminal where you started your first node, copy the generated -enrollment token for adding new {es} nodes. +The enrollment token is valid for 30 minutes. +-- -.
On your new node, start {es} and include the generated enrollment token. +. Start a new {es} container. Include the enrollment token as an environment variable. + -- ifeval::["{release-state}"=="unreleased"] @@ -228,10 +200,16 @@ docker run -e ENROLLMENT_TOKEN="<token>" --name es02 --net elastic -it {docker-image} ---- endif::[] - -{es} is now configured to join the existing cluster. -- +. Call the <> to verify the node was added to the cluster. ++ +[source,sh] +---- +curl --cacert http_ca.crt -u elastic:$ELASTIC_PASSWORD https://localhost:9200/_cat/nodes +---- +// NOTCONSOLE + ===== Setting JVM heap size If you experience issues where the container where your first node is running exits when your second node starts, explicitly set values for the JVM heap size. diff --git a/docs/reference/setup/install/package-security.asciidoc b/docs/reference/setup/install/package-security.asciidoc index fb613c4786ef2..40bd49d064b43 100644 --- a/docs/reference/setup/install/package-security.asciidoc +++ b/docs/reference/setup/install/package-security.asciidoc @@ -10,31 +10,13 @@ the `elastic` built-in superuser. * Certificates and keys for TLS are generated for the transport and HTTP layer, and TLS is enabled and configured with these keys and certificates. -The password and certificate and keys are output to your terminal. For example: +The password, certificate, and keys are output to your terminal. + +We recommend storing the `elastic` password as an environment variable in your shell. Example: [source,sh] ---- - -------Security autoconfiguration information------- - -Authentication and authorization are enabled. -TLS for the transport and HTTP layers is enabled and configured. - -The generated password for the elastic built-in superuser is : - -If this node should join an existing cluster, you can reconfigure this with -'/usr/share/elasticsearch/bin/elasticsearch-reconfigure-node --enrollment-token ' -after creating an enrollment token on your existing cluster. - -You can complete the following actions at any time: - -Reset the password of the elastic built-in superuser with -'/usr/share/elasticsearch/bin/elasticsearch-reset-password -u elastic'. - -Generate an enrollment token for Kibana instances with - '/usr/share/elasticsearch/bin/elasticsearch-create-enrollment-token -s kibana'. - -Generate an enrollment token for Elasticsearch nodes with -'/usr/share/elasticsearch/bin/elasticsearch-create-enrollment-token -s node'. +export ELASTIC_PASSWORD="your_password" ---- ===== Reconfigure a node to join an existing cluster diff --git a/docs/reference/setup/install/targz-start.asciidoc b/docs/reference/setup/install/targz-start.asciidoc index d9208d7d70e5f..294f0e1541fbc 100644 --- a/docs/reference/setup/install/targz-start.asciidoc +++ b/docs/reference/setup/install/targz-start.asciidoc @@ -18,11 +18,14 @@ and TLS is enabled and configured with these keys and certificates. * An enrollment token is generated for {kib}, which is valid for 30 minutes. The password for the `elastic` user and the enrollment token for {kib} are -output to your terminal. For example: +output to your terminal. -:slash: / +We recommend storing the `elastic` password as an environment variable in your shell. Example: -include::auto-config-output.asciidoc[] +[source,sh] +---- +export ELASTIC_PASSWORD="your_password" +---- If you have password-protected the {es} keystore, you will be prompted to enter the keystore's password.
See <> for more diff --git a/docs/reference/setup/install/zip-windows-start.asciidoc b/docs/reference/setup/install/zip-windows-start.asciidoc index 16566d92023d8..29356d398c808 100644 --- a/docs/reference/setup/install/zip-windows-start.asciidoc +++ b/docs/reference/setup/install/zip-windows-start.asciidoc @@ -18,11 +18,14 @@ and TLS is enabled and configured with these keys and certificates. * An enrollment token is generated for {kib}, which is valid for 30 minutes. The password for the `elastic` user and the enrollment token for {kib} are -output to your terminal. For example: +output to your terminal. -:slash: \ +We recommend storing the `elastic` password as an environment variable in your shell. Example: -include::auto-config-output.asciidoc[] +[source,sh] +---- +$ELASTIC_PASSWORD = "your_password" +---- If you have password-protected the {es} keystore, you will be prompted to enter the keystore's password. See <> for more details. diff --git a/docs/reference/tab-widgets/api-call-widget.asciidoc b/docs/reference/tab-widgets/api-call-widget.asciidoc deleted file mode 100644 index 37f49f89847cf..0000000000000 --- a/docs/reference/tab-widgets/api-call-widget.asciidoc +++ /dev/null @@ -1,40 +0,0 @@ -++++ -
-++++
-
-include::api-call.asciidoc[tag=cloud]
-
-++++
-++++
-
-include::api-call.asciidoc[tag=self-managed]
-
-++++
-++++ diff --git a/docs/reference/tab-widgets/api-call.asciidoc b/docs/reference/tab-widgets/api-call.asciidoc deleted file mode 100644 index 0f4a005d956a0..0000000000000 --- a/docs/reference/tab-widgets/api-call.asciidoc +++ /dev/null @@ -1,69 +0,0 @@ -// tag::cloud[] -**Use {kib}** - -//tag::kibana-api-ex[] -. Open {kib}'s main menu and go to **Dev Tools > Console**. -+ -[role="screenshot"] -image::images/kibana-console.png[{kib} Console,align="center"] - -. Run the following example API request in the console: -+ -[source,console] ----- -GET / ----- - -//end::kibana-api-ex[] - -**Use curl** - -To communicate with {es} using curl or another client, you need your cluster's -endpoint. - -. Open {kib}'s main menu and click **Manage this deployment**. - -. From your deployment menu, go to the **Elasticsearch** page. Click **Copy -endpoint**. - -. To submit an example API request, run the following curl command in a new -terminal session. Replace `` with the password for the `elastic` user. -Replace `` with your endpoint. -+ -[source,sh] ----- -curl -u elastic: / ----- -// NOTCONSOLE - -// end::cloud[] - -// tag::self-managed[] -**Use {kib}** - -include::api-call.asciidoc[tag=kibana-api-ex] - -**Use curl** - -To submit an example API request, run the following curl command in a new -terminal session. - -. Copy the `http_ca.crt` security certificate from your Docker container to -your local machine. -+ -[source,sh] ----- -docker cp es01:/usr/share/elasticsearch/config/certs/http_ca.crt . ----- - -. Open a new terminal and verify that you can connect to your {es} cluster by -making an authenticated call, using the `http_ca.crt` file that you copied from -your Docker container. Enter the password for the `elastic` user when prompted. -+ -[source,sh] ----- -curl --cacert http_ca.crt -u elastic https://localhost:9200 ----- -// NOTCONSOLE - -// end::self-managed[] diff --git a/libs/core/src/test/java/org/elasticsearch/jdk/JarHellTests.java b/libs/core/src/test/java/org/elasticsearch/jdk/JarHellTests.java index 222ca144098a7..6bcc1d4b8fb58 100644 --- a/libs/core/src/test/java/org/elasticsearch/jdk/JarHellTests.java +++ b/libs/core/src/test/java/org/elasticsearch/jdk/JarHellTests.java @@ -121,7 +121,7 @@ public void testDirAndJar() throws Exception { public void testNonJDKModuleURLs() throws Throwable { var bootLayer = ModuleLayer.boot(); - Path fooDir = createTempDir(getTestName()); + Path fooDir = createTempDir(); Path fooJar = PathUtils.get(makeJar(fooDir, "foo.jar", null, "p/Foo.class").toURI()); var fooConfiguration = bootLayer.configuration().resolve(ModuleFinder.of(), ModuleFinder.of(fooJar), List.of("foo")); Set urls = JarHell.nonJDKModuleURLs(fooConfiguration).collect(Collectors.toSet()); diff --git a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java index 4298ec1ac6c99..ab4a008d829c6 100644 --- a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java +++ b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java @@ -16,7 +16,6 @@ import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexableField; -import org.apache.lucene.search.IndexSearcher; import 
org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; @@ -1029,8 +1028,6 @@ private void testSearchCase( } try (DirectoryReader indexReader = DirectoryReader.open(directory)) { - final IndexSearcher indexSearcher = newIndexSearcher(indexReader); - final AutoDateHistogramAggregationBuilder aggregationBuilder = new AutoDateHistogramAggregationBuilder("_name"); if (configure != null) { configure.accept(aggregationBuilder); @@ -1042,7 +1039,7 @@ private void testSearchCase( MappedFieldType numericFieldType = new NumberFieldMapper.NumberFieldType(NUMERIC_FIELD, NumberFieldMapper.NumberType.LONG); final InternalAutoDateHistogram histogram = searchAndReduce( - indexSearcher, + indexReader, new AggTestConfig(aggregationBuilder, fieldType, instantFieldType, numericFieldType).withQuery(query) ); verify.accept(histogram); diff --git a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/metric/MatrixStatsAggregatorTests.java b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/metric/MatrixStatsAggregatorTests.java index 0fbe0c47e8d54..1de3c513bef9c 100644 --- a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/metric/MatrixStatsAggregatorTests.java +++ b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/metric/MatrixStatsAggregatorTests.java @@ -12,7 +12,6 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.StringField; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.util.NumericUtils; @@ -33,11 +32,10 @@ public void testNoData() throws Exception { indexWriter.addDocument(Collections.singleton(new StringField("another_field", "value", Field.Store.NO))); } try (IndexReader reader = indexWriter.getReader()) { - IndexSearcher searcher = newSearcher(reader); MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg").fields( Collections.singletonList("field") ); - InternalMatrixStats stats = searchAndReduce(searcher, new AggTestConfig(aggBuilder, ft)); + InternalMatrixStats stats = searchAndReduce(reader, new AggTestConfig(aggBuilder, ft)); assertNull(stats.getStats()); assertEquals(0L, stats.getDocCount()); } @@ -52,11 +50,10 @@ public void testUnmapped() throws Exception { indexWriter.addDocument(Collections.singleton(new StringField("another_field", "value", Field.Store.NO))); } try (IndexReader reader = indexWriter.getReader()) { - IndexSearcher searcher = newSearcher(reader); MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg").fields( Collections.singletonList("bogus") ); - InternalMatrixStats stats = searchAndReduce(searcher, new AggTestConfig(aggBuilder, ft)); + InternalMatrixStats stats = searchAndReduce(reader, new AggTestConfig(aggBuilder, ft)); assertNull(stats.getStats()); assertEquals(0L, stats.getDocCount()); } @@ -87,11 +84,10 @@ public void testTwoFields() throws Exception { MultiPassStats multiPassStats = new MultiPassStats(fieldA, fieldB); multiPassStats.computeStats(Arrays.asList(fieldAValues), Arrays.asList(fieldBValues)); try (IndexReader reader = indexWriter.getReader()) { - IndexSearcher searcher = newSearcher(reader); MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg").fields( Arrays.asList(fieldA, fieldB) ); - InternalMatrixStats stats 
= searchAndReduce(searcher, new AggTestConfig(aggBuilder, ftA, ftB)); + InternalMatrixStats stats = searchAndReduce(reader, new AggTestConfig(aggBuilder, ftA, ftB)); multiPassStats.assertNearlyEqual(stats); assertTrue(MatrixAggregationInspectionHelper.hasValue(stats)); } diff --git a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/pipeline/CumulativeSumAggregatorTests.java b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/pipeline/CumulativeSumAggregatorTests.java index 17a0836967a65..ce9a47796fc03 100644 --- a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/pipeline/CumulativeSumAggregatorTests.java +++ b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/pipeline/CumulativeSumAggregatorTests.java @@ -12,7 +12,6 @@ import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; @@ -314,13 +313,11 @@ private void executeTestCase( } try (DirectoryReader indexReader = DirectoryReader.open(directory)) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); - DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(HISTO_FIELD); MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType("value_field", NumberFieldMapper.NumberType.LONG); InternalAggregation histogram; - histogram = searchAndReduce(indexSearcher, new AggTestConfig(aggBuilder, fieldType, valueFieldType).withQuery(query)); + histogram = searchAndReduce(indexReader, new AggTestConfig(aggBuilder, fieldType, valueFieldType).withQuery(query)); verify.accept(histogram); } } diff --git a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/pipeline/DerivativeAggregatorTests.java b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/pipeline/DerivativeAggregatorTests.java index ab560999bd6dc..70eb63c5e61da 100644 --- a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/pipeline/DerivativeAggregatorTests.java +++ b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/pipeline/DerivativeAggregatorTests.java @@ -11,7 +11,6 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; @@ -666,8 +665,7 @@ public void testDerivDerivNPE() throws IOException { } try (DirectoryReader indexReader = DirectoryReader.open(directory)) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); - searchAndReduce(indexSearcher, new AggTestConfig(aggBuilder).withQuery(query)); + searchAndReduce(indexReader, new AggTestConfig(aggBuilder).withQuery(query)); } } } @@ -717,13 +715,11 @@ private void executeTestCase( } try (DirectoryReader indexReader = DirectoryReader.open(directory)) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); - DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(SINGLE_VALUED_FIELD_NAME); MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType("value_field", NumberFieldMapper.NumberType.LONG); InternalAggregation histogram = searchAndReduce( - indexSearcher, + 
indexReader, new AggTestConfig(aggBuilder, fieldType, valueFieldType).withQuery(query) ); verify.accept(histogram); diff --git a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/pipeline/MovFnAggregatorTests.java b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/pipeline/MovFnAggregatorTests.java index 98d5f10136090..8fec4d4d460a2 100644 --- a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/pipeline/MovFnAggregatorTests.java +++ b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/pipeline/MovFnAggregatorTests.java @@ -13,7 +13,6 @@ import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; @@ -162,14 +161,13 @@ private void executeTestCase(Query query, DateHistogramAggregationBuilder aggBui } try (DirectoryReader indexReader = DirectoryReader.open(directory)) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(aggBuilder.field()); MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType("value_field", NumberFieldMapper.NumberType.LONG); InternalDateHistogram histogram; histogram = searchAndReduce( - indexSearcher, + indexReader, new AggTestConfig(aggBuilder, fieldType, valueFieldType).withMaxBuckets(1000).withQuery(query) ); verify.accept(histogram); diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java index e8182a9bbe218..ddc36d426edc3 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java @@ -13,7 +13,6 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.StringField; import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermInSetQuery; @@ -65,7 +64,7 @@ public void testNoDocs() throws IOException { indexWriter.close(); DirectoryReader indexReader = DirectoryReader.open(directory); - testCase(new MatchAllDocsQuery(), newIndexSearcher(indexReader), childrenToParent -> { + testCase(new MatchAllDocsQuery(), indexReader, childrenToParent -> { assertEquals(0, childrenToParent.getDocCount()); Aggregation parentAggregation = childrenToParent.getAggregations().get("in_parent"); assertEquals(0, childrenToParent.getDocCount()); @@ -88,10 +87,8 @@ public void testParentChild() throws IOException { DirectoryReader.open(directory), new ShardId(new Index("foo", "_na_"), 1) ); - IndexSearcher indexSearcher = newIndexSearcher(indexReader); - // verify with all documents - testCase(new MatchAllDocsQuery(), indexSearcher, parent -> { + testCase(new MatchAllDocsQuery(), indexReader, parent -> { int expectedTotalParents = 0; int expectedMinValue = Integer.MAX_VALUE; for (Tuple expectedValues : expectedParentChildRelations.values()) { @@ -109,7 +106,7 @@ public void testParentChild() throws IOException 
{ // verify for each children for (String parent : expectedParentChildRelations.keySet()) { - testCase(new TermInSetQuery(IdFieldMapper.NAME, Uid.encodeId("child0_" + parent)), indexSearcher, aggregation -> { + testCase(new TermInSetQuery(IdFieldMapper.NAME, Uid.encodeId("child0_" + parent)), indexReader, aggregation -> { assertEquals( "Expected one result for min-aggregation for parent: " + parent + ", but had aggregation-results: " + aggregation, 1, @@ -155,10 +152,8 @@ public void testParentChildTerms() throws IOException { DirectoryReader.open(directory), new ShardId(new Index("foo", "_na_"), 1) ); - IndexSearcher indexSearcher = newIndexSearcher(indexReader); - // verify a terms-aggregation inside the parent-aggregation - testCaseTerms(new MatchAllDocsQuery(), indexSearcher, parent -> { + testCaseTerms(new MatchAllDocsQuery(), indexReader, parent -> { assertNotNull(parent); assertTrue(JoinAggregationInspectionHelper.hasValue(parent)); LongTerms valueTerms = parent.getAggregations().get("value_terms"); @@ -198,11 +193,9 @@ public void testTermsParentChildTerms() throws IOException { DirectoryReader.open(directory), new ShardId(new Index("foo", "_na_"), 1) ); - IndexSearcher indexSearcher = newIndexSearcher(indexReader); - // verify a terms-aggregation inside the parent-aggregation which itself is inside a // terms-aggregation on the child-documents - testCaseTermsParentTerms(new MatchAllDocsQuery(), indexSearcher, longTerms -> { + testCaseTermsParentTerms(new MatchAllDocsQuery(), indexReader, longTerms -> { assertNotNull(longTerms); for (LongTerms.Bucket bucket : longTerms.getBuckets()) { @@ -258,34 +251,28 @@ private static SortedDocValuesField createJoinField(String parentType, String id return new SortedDocValuesField("join_field#" + parentType, new BytesRef(id)); } - private void testCase(Query query, IndexSearcher indexSearcher, Consumer verify) throws IOException { + private void testCase(Query query, DirectoryReader reader, Consumer verify) throws IOException { ParentAggregationBuilder aggregationBuilder = new ParentAggregationBuilder("_name", CHILD_TYPE); aggregationBuilder.subAggregation(new MinAggregationBuilder("in_parent").field("number")); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG); - InternalParent result = searchAndReduce( - indexSearcher, - new AggTestConfig(aggregationBuilder, withJoinFields(fieldType)).withQuery(query) - ); + InternalParent result = searchAndReduce(reader, new AggTestConfig(aggregationBuilder, withJoinFields(fieldType)).withQuery(query)); verify.accept(result); } - private void testCaseTerms(Query query, IndexSearcher indexSearcher, Consumer verify) throws IOException { + private void testCaseTerms(Query query, DirectoryReader reader, Consumer verify) throws IOException { ParentAggregationBuilder aggregationBuilder = new ParentAggregationBuilder("_name", CHILD_TYPE); aggregationBuilder.subAggregation(new TermsAggregationBuilder("value_terms").field("number")); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG); - InternalParent result = searchAndReduce( - indexSearcher, - new AggTestConfig(aggregationBuilder, withJoinFields(fieldType)).withQuery(query) - ); + InternalParent result = searchAndReduce(reader, new AggTestConfig(aggregationBuilder, withJoinFields(fieldType)).withQuery(query)); verify.accept(result); } // run a terms aggregation on the number in child-documents, then a parent aggregation and then terms on the 
parent-number - private void testCaseTermsParentTerms(Query query, IndexSearcher indexSearcher, Consumer verify) throws IOException { + private void testCaseTermsParentTerms(Query query, DirectoryReader reader, Consumer verify) throws IOException { AggregationBuilder aggregationBuilder = new TermsAggregationBuilder("subvalue_terms").field("subNumber") .subAggregation( new ParentAggregationBuilder("to_parent", CHILD_TYPE).subAggregation( @@ -296,7 +283,7 @@ private void testCaseTermsParentTerms(Query query, IndexSearcher indexSearcher, MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG); MappedFieldType subFieldType = new NumberFieldMapper.NumberFieldType("subNumber", NumberFieldMapper.NumberType.LONG); LongTerms result = searchAndReduce( - indexSearcher, + reader, new AggTestConfig(aggregationBuilder, withJoinFields(fieldType, subFieldType)).withQuery(query) ); verify.accept(result); diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java index 8161d4d03e34c..0bb64dcccbf11 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java @@ -14,8 +14,8 @@ import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.document.StringField; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.Term; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermInSetQuery; @@ -71,7 +71,7 @@ public void testNoDocs() throws IOException { indexWriter.close(); DirectoryReader indexReader = DirectoryReader.open(directory); - testCase(new MatchAllDocsQuery(), newIndexSearcher(indexReader), parentToChild -> { + testCase(new MatchAllDocsQuery(), indexReader, parentToChild -> { assertEquals(0, parentToChild.getDocCount()); assertEquals(Double.POSITIVE_INFINITY, ((Min) parentToChild.getAggregations().get("in_child")).value(), Double.MIN_VALUE); }); @@ -90,9 +90,7 @@ public void testParentChild() throws IOException { DirectoryReader.open(directory), new ShardId(new Index("foo", "_na_"), 1) ); - IndexSearcher indexSearcher = newIndexSearcher(indexReader); - - testCase(new MatchAllDocsQuery(), indexSearcher, child -> { + testCase(new MatchAllDocsQuery(), indexReader, child -> { int expectedTotalChildren = 0; int expectedMinValue = Integer.MAX_VALUE; for (Tuple expectedValues : expectedParentChildRelations.values()) { @@ -105,7 +103,7 @@ public void testParentChild() throws IOException { }); for (String parent : expectedParentChildRelations.keySet()) { - testCase(new TermInSetQuery(IdFieldMapper.NAME, Uid.encodeId(parent)), indexSearcher, child -> { + testCase(new TermInSetQuery(IdFieldMapper.NAME, Uid.encodeId(parent)), indexReader, child -> { assertEquals((long) expectedParentChildRelations.get(parent).v1(), child.getDocCount()); assertEquals( expectedParentChildRelations.get(parent).v2(), @@ -132,8 +130,6 @@ public void testParentChildAsSubAgg() throws IOException { new ShardId(new Index("foo", "_na_"), 1) ) ) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); - AggregationBuilder request = new 
TermsAggregationBuilder("t").field("kwd") .subAggregation( new ChildrenAggregationBuilder("children", CHILD_TYPE).subAggregation( @@ -154,7 +150,7 @@ public void testParentChildAsSubAgg() throws IOException { expectedOddMin = Math.min(expectedOddMin, e.getValue().v2()); } } - StringTerms result = searchAndReduce(indexSearcher, new AggTestConfig(request, withJoinFields(longField("number"), kwd))); + StringTerms result = searchAndReduce(indexReader, new AggTestConfig(request, withJoinFields(longField("number"), kwd))); StringTerms.Bucket evenBucket = result.getBucketByKey("even"); InternalChildren evenChildren = evenBucket.getAggregations().get("children"); @@ -186,9 +182,6 @@ public void testBestDeferringCollectorWithSubAggOfChildrenAggNeedingScores() thr new ShardId(new Index("foo", "_na_"), 1) ) ) { - // maybeWrap should be false here, in ValueSource.java we sometimes cast to DirectoryReader and - // these casts can then fail if the maybeWrap is true. - var indexSearcher = newIndexSearcher(indexReader); // invalid usage, { var aggregationBuilder = new ChildrenAggregationBuilder("_name1", CHILD_TYPE); @@ -201,7 +194,7 @@ public void testBestDeferringCollectorWithSubAggOfChildrenAggNeedingScores() thr var fieldType2 = new KeywordFieldMapper.KeywordFieldType("string_field", false, true, Map.of()); var e = expectThrows(RuntimeException.class, () -> { searchAndReduce( - indexSearcher, + indexReader, new AggTestConfig(aggregationBuilder, withJoinFields(fieldType, fieldType2)).withQuery( new TermQuery(new Term("join_field", "parent_type")) ) @@ -231,7 +224,7 @@ public void testBestDeferringCollectorWithSubAggOfChildrenAggNeedingScores() thr var fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG); var fieldType2 = new KeywordFieldMapper.KeywordFieldType("string_field", false, true, Map.of()); InternalChildren result = searchAndReduce( - indexSearcher, + indexReader, new AggTestConfig(aggregationBuilder, withJoinFields(fieldType, fieldType2)).withQuery( new TermQuery(new Term("join_field", "parent_type")) ) @@ -297,14 +290,14 @@ private static SortedDocValuesField createJoinField(String parentType, String id return new SortedDocValuesField("join_field#" + parentType, new BytesRef(id)); } - private void testCase(Query query, IndexSearcher indexSearcher, Consumer verify) throws IOException { + private void testCase(Query query, IndexReader indexReader, Consumer verify) throws IOException { ChildrenAggregationBuilder aggregationBuilder = new ChildrenAggregationBuilder("_name", CHILD_TYPE); aggregationBuilder.subAggregation(new MinAggregationBuilder("in_child").field("number")); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG); InternalChildren result = searchAndReduce( - indexSearcher, + indexReader, new AggTestConfig(aggregationBuilder, withJoinFields(fieldType)).withQuery(query) ); verify.accept(result); diff --git a/modules/percolator/src/internalClusterTest/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java b/modules/percolator/src/internalClusterTest/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java index 8cbdbbe3a343a..5d6d51e548eea 100644 --- a/modules/percolator/src/internalClusterTest/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java +++ b/modules/percolator/src/internalClusterTest/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java @@ -11,6 +11,7 @@ import org.elasticsearch.ElasticsearchException; import 
org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; @@ -34,8 +35,10 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.Map; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; +import static org.elasticsearch.index.query.QueryBuilders.combinedFieldsQuery; import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery; import static org.elasticsearch.index.query.QueryBuilders.geoBoundingBoxQuery; import static org.elasticsearch.index.query.QueryBuilders.geoDistanceQuery; @@ -43,7 +46,9 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery; +import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery; import static org.elasticsearch.index.query.QueryBuilders.rangeQuery; +import static org.elasticsearch.index.query.QueryBuilders.simpleQueryStringQuery; import static org.elasticsearch.index.query.QueryBuilders.spanNearQuery; import static org.elasticsearch.index.query.QueryBuilders.spanNotQuery; import static org.elasticsearch.index.query.QueryBuilders.spanTermQuery; @@ -1273,4 +1278,80 @@ public void testWrappedWithConstantScore() throws Exception { assertEquals(1, response.getHits().getTotalHits().value); } + + public void testWithWildcardFieldNames() throws Exception { + assertAcked( + indicesAdmin().prepareCreate("test") + .setMapping( + "text_1", + "type=text", + "q_simple", + "type=percolator", + "q_string", + "type=percolator", + "q_match", + "type=percolator", + "q_combo", + "type=percolator" + ) + ); + + client().prepareIndex("test") + .setId("1") + .setSource( + jsonBuilder().startObject() + .field("q_simple", simpleQueryStringQuery("yada").fields(Map.of("text*", 1f))) + .field("q_string", queryStringQuery("yada").fields(Map.of("text*", 1f))) + .field("q_match", multiMatchQuery("yada", "text*")) + .field("q_combo", combinedFieldsQuery("yada", "text*")) + .endObject() + ) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .execute() + .actionGet(); + + SearchResponse response = client().prepareSearch("test") + .setQuery( + new PercolateQueryBuilder( + "q_simple", + BytesReference.bytes(jsonBuilder().startObject().field("text_1", "yada").endObject()), + XContentType.JSON + ) + ) + .get(); + assertEquals(1, response.getHits().getTotalHits().value); + + response = client().prepareSearch("test") + .setQuery( + new PercolateQueryBuilder( + "q_string", + BytesReference.bytes(jsonBuilder().startObject().field("text_1", "yada").endObject()), + XContentType.JSON + ) + ) + .get(); + assertEquals(1, response.getHits().getTotalHits().value); + + response = client().prepareSearch("test") + .setQuery( + new PercolateQueryBuilder( + "q_match", + BytesReference.bytes(jsonBuilder().startObject().field("text_1", "yada").endObject()), + XContentType.JSON + ) + ) + .get(); + assertEquals(1, response.getHits().getTotalHits().value); + + response = client().prepareSearch("test") + .setQuery( + new PercolateQueryBuilder( + "q_combo", + BytesReference.bytes(jsonBuilder().startObject().field("text_1", "yada").endObject()), + XContentType.JSON + ) + ) + .get(); + assertEquals(1, 
response.getHits().getTotalHits().value); + } } diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java index 3bf01f6f7493a..1f9ad0136790c 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java @@ -530,7 +530,7 @@ protected Analyzer getWrappedAnalyzer(String fieldName) { PercolatorFieldMapper.PercolatorFieldType pft = (PercolatorFieldMapper.PercolatorFieldType) fieldType; String queryName = this.name != null ? this.name : pft.name(); SearchExecutionContext percolateShardContext = wrap(context); - PercolatorFieldMapper.configureContext(percolateShardContext, pft.mapUnmappedFieldsAsText); + percolateShardContext = PercolatorFieldMapper.configureContext(percolateShardContext, pft.mapUnmappedFieldsAsText); PercolateQuery.QueryStore queryStore = createStore(pft.queryBuilderField, percolateShardContext); return pft.percolateQuery(queryName, queryStore, documents, docSearcher, excludeNestedDocuments, context.indexVersionCreated()); diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java index 82a8c7057065f..f5b3f06793183 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java @@ -55,6 +55,7 @@ import org.elasticsearch.index.mapper.SourceValueFetcher; import org.elasticsearch.index.mapper.TextSearchInfo; import org.elasticsearch.index.mapper.ValueFetcher; +import org.elasticsearch.index.query.FilteredSearchExecutionContext; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.Rewriteable; @@ -409,7 +410,7 @@ public void parse(DocumentParserContext context) throws IOException { throw new IllegalArgumentException("a document can only contain one percolator query"); } - configureContext(executionContext, isMapUnmappedFieldAsText()); + executionContext = configureContext(executionContext, isMapUnmappedFieldAsText()); QueryBuilder queryBuilder = parseQueryBuilder(context); // Fetching of terms, shapes and indexed scripts happen during this rewrite: @@ -509,7 +510,8 @@ void processQuery(Query query, DocumentParserContext context) { doc.add(new NumericDocValuesField(minimumShouldMatchFieldMapper.name(), result.minimumShouldMatch)); } - static void configureContext(SearchExecutionContext context, boolean mapUnmappedFieldsAsString) { + static SearchExecutionContext configureContext(SearchExecutionContext context, boolean mapUnmappedFieldsAsString) { + SearchExecutionContext wrapped = wrapAllEmptyTextFields(context); // This means that fields in the query need to exist in the mapping prior to registering this query // The reason that this is required, is that if a field doesn't exist then the query assumes defaults, which may be undesired. // @@ -522,8 +524,9 @@ static void configureContext(SearchExecutionContext context, boolean mapUnmapped // // if index.percolator.map_unmapped_fields_as_string is set to true, query can contain unmapped fields which will be mapped // as an analyzed string. 
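The configureContext change in this hunk converts an in-place mutation into wrap-and-return: the method now wraps the incoming context (via wrapAllEmptyTextFields, defined below) and returns the wrapper, so the two call sites updated above in PercolateQueryBuilder and PercolatorFieldMapper.parse must reassign the result. A minimal sketch of the required call-site pattern; prepareContext is a hypothetical caller, not code from this change:

```java
// Hypothetical caller inside the percolator module, shown only to illustrate
// the reassignment the new signature forces.
SearchExecutionContext prepareContext(SearchExecutionContext ctx, boolean mapUnmappedFieldsAsText) {
    // Before this change the method returned void and mutated ctx in place:
    //     PercolatorFieldMapper.configureContext(ctx, mapUnmappedFieldsAsText);
    // Now it returns a FilteredSearchExecutionContext wrapping ctx; keeping the
    // old reference would silently bypass the fieldExistsInIndex override.
    ctx = PercolatorFieldMapper.configureContext(ctx, mapUnmappedFieldsAsText);
    return ctx;
}
```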
- context.setAllowUnmappedFields(false); - context.setMapUnmappedFieldAsString(mapUnmappedFieldsAsString); + wrapped.setAllowUnmappedFields(false); + wrapped.setMapUnmappedFieldAsString(mapUnmappedFieldsAsString); + return wrapped; + } @Override @@ -570,4 +573,17 @@ static byte[] encodeRange(String rangeFieldName, byte[] minEncoded, byte[] maxEn System.arraycopy(maxEncoded, 0, bytes, BinaryRange.BYTES + offset, maxEncoded.length); return bytes; } + + // When expanding wildcard fields for term queries, we don't expand to fields that are empty. + // This is sane behavior for typical usage. But for the percolator, the fields for the query may not have any terms. + // Consequently, we may erroneously skip expanding those term fields. + // This override allows mapped field values to expand via wildcard input, even if the field is empty in the shard. + static SearchExecutionContext wrapAllEmptyTextFields(SearchExecutionContext searchExecutionContext) { + return new FilteredSearchExecutionContext(searchExecutionContext) { + @Override + public boolean fieldExistsInIndex(String fieldname) { + return true; + } + }; + } } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java index 1ce8f721baf42..9a0d6692723e3 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java @@ -383,10 +383,10 @@ private void setupPipeline(Channel ch, boolean isRemoteClusterServerChannel) { pipeline.addLast("logging", ESLoggingHandler.INSTANCE); } pipeline.addLast("chunked_writer", new Netty4WriteThrottlingHandler(getThreadPool().getThreadContext())); - pipeline.addLast("dispatcher", new Netty4MessageInboundHandler(this, getInboundPipeline(isRemoteClusterServerChannel))); + pipeline.addLast("dispatcher", new Netty4MessageInboundHandler(this, getInboundPipeline(ch, isRemoteClusterServerChannel))); } - protected InboundPipeline getInboundPipeline(boolean isRemoteClusterServerChannel) { + protected InboundPipeline getInboundPipeline(Channel ch, boolean isRemoteClusterServerChannel) { return new InboundPipeline( getStatsTracker(), threadPool::relativeTimeInMillis, diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java index e2ae9fd17bb23..faa1657ae1ef7 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java @@ -35,7 +35,6 @@ import java.util.function.Function; import java.util.function.Predicate; import java.util.stream.Collectors; -import java.util.stream.Stream; import static org.elasticsearch.index.mapper.TimeSeriesParams.TIME_SERIES_DIMENSION_PARAM; import static org.elasticsearch.index.mapper.TimeSeriesParams.TIME_SERIES_METRIC_PARAM; @@ -545,8 +544,13 @@ void add( } } - Stream<String> getIndices() { - return indicesList.stream().flatMap(c -> Arrays.stream(c.indices)); + void getIndices(Set<String> into) { + for (int i = 0; i < indicesList.size(); i++) { + IndexCaps indexCaps = indicesList.get(i); + for (String element : indexCaps.indices) { + into.add(element); + } + } + } private String[] filterIndices(int length, Predicate<String> pred) {
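The getIndices rewrite just above replaces the removed Stream pipeline with a plain nested loop that accumulates into a caller-supplied set, avoiding a Stream, a lambda, and an intermediate collection on every field-caps merge. A small self-contained sketch of the two equivalent forms; the IndexCaps record and sample data here are stand-ins for the real inner class:

```java
import java.util.*;
import java.util.stream.*;

class GetIndicesSketch {
    // Stand-in for the IndexCaps entries kept by FieldCapabilities' builder.
    record IndexCaps(String[] indices) {}

    static final List<IndexCaps> indicesList = List.of(new IndexCaps(new String[] { "idx-1", "idx-2" }));

    // Old form: allocates a Stream per call and leaves collection to the caller.
    static Stream<String> getIndicesStream() {
        return indicesList.stream().flatMap(c -> Arrays.stream(c.indices()));
    }

    // New form: fills a set the caller owns, so it can be cleared and reused.
    static void getIndices(Set<String> into) {
        for (IndexCaps caps : indicesList) {
            Collections.addAll(into, caps.indices());
        }
    }

    public static void main(String[] args) {
        Set<String> scratch = new HashSet<>();
        getIndices(scratch);
        System.out.println(scratch); // prints idx-1 and idx-2
    }
}
```

As the next hunk shows, TransportFieldCapabilitiesAction leans on this by clearing and refilling a single mappedScratch set per field rather than collecting a fresh set each time.

diff --git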
a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java index 0c5645826219a..79b9fa5099467 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java @@ -23,9 +23,9 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.Maps; -import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.shard.ShardId; @@ -47,6 +47,7 @@ import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; @@ -310,69 +311,88 @@ private static FieldCapabilitiesResponse merge( } task.ensureNotCancelled(); - return new FieldCapabilitiesResponse( - indices, - buildResponseMap(indexResponsesMap, responseMapBuilder, request.includeUnmapped()), - failures - ); + Map> responseMap = Maps.newMapWithExpectedSize(responseMapBuilder.size()); + if (request.includeUnmapped()) { + collectResponseMapIncludingUnmapped(indices, responseMapBuilder, responseMap); + } else { + collectResponseMap(responseMapBuilder, responseMap); + } + return new FieldCapabilitiesResponse(indices, Collections.unmodifiableMap(responseMap), failures); } - private static Map> buildResponseMap( - Map indexResponsesMap, + private static void collectResponseMapIncludingUnmapped( + String[] indices, Map> responseMapBuilder, - boolean includeUnmapped + Map> responseMap ) { - Map> responseMap = Maps.newMapWithExpectedSize(responseMapBuilder.size()); - final var indices = indexResponsesMap.keySet(); + final Set mappedScratch = new HashSet<>(); for (Map.Entry> entry : responseMapBuilder.entrySet()) { var typeMapBuilder = entry.getValue().entrySet(); - Function unmapped = null; - if (includeUnmapped) { - // do this directly, rather than using the builder, to save creating a whole lot of objects we don't need - unmapped = getUnmappedFields( - indices, - entry.getKey(), - typeMapBuilder.stream().flatMap(t -> t.getValue().getIndices()).collect(Collectors.toSet()) - ); + // do this directly, rather than using the builder, to save creating a whole lot of objects we don't need + mappedScratch.clear(); + for (Map.Entry b : typeMapBuilder) { + b.getValue().getIndices(mappedScratch); } + var unmapped = getUnmappedFields(indices, entry.getKey(), mappedScratch); final int resSize = typeMapBuilder.size() + (unmapped == null ? 
0 : 1); - boolean multiTypes = resSize > 1; - final Map res = Maps.newHashMapWithExpectedSize(resSize); - for (Map.Entry e : typeMapBuilder) { - res.put(e.getKey(), e.getValue().build(multiTypes)); - } + final Map res = capabilities(resSize, typeMapBuilder); if (unmapped != null) { - res.put("unmapped", unmapped.apply(multiTypes)); + res.put("unmapped", unmapped.apply(resSize > 1)); } responseMap.put(entry.getKey(), Collections.unmodifiableMap(res)); } - return Collections.unmodifiableMap(responseMap); + } + + private static void collectResponseMap( + Map> responseMapBuilder, + Map> responseMap + ) { + for (Map.Entry> entry : responseMapBuilder.entrySet()) { + var typeMapBuilder = entry.getValue().entrySet(); + responseMap.put(entry.getKey(), Collections.unmodifiableMap(capabilities(typeMapBuilder.size(), typeMapBuilder))); + } + } + + private static Map capabilities(int resSize, Set> builders) { + boolean multiTypes = resSize > 1; + final Map res = Maps.newHashMapWithExpectedSize(resSize); + for (Map.Entry e : builders) { + res.put(e.getKey(), e.getValue().build(multiTypes)); + } + return res; } @Nullable - private static Function getUnmappedFields(Set indices, String field, Set mappedIndices) { - if (mappedIndices.size() != indices.size()) { - return mt -> new FieldCapabilities( - field, - "unmapped", - false, - false, - false, - false, - null, - mt ? Sets.difference(indices, mappedIndices).toArray(Strings.EMPTY_ARRAY) : null, - null, - null, - null, - null, - Map.of() - ); + private static Function getUnmappedFields(String[] indices, String field, Set mappedIndices) { + if (mappedIndices.size() != indices.length) { + return mt -> { + final String[] diff; + if (mt) { + diff = new String[indices.length - mappedIndices.size()]; + Iterator indicesIter = Iterators.forArray(indices); + for (int i = 0; i < diff.length; i++) { + diff[i] = nextIndex(indicesIter, mappedIndices); + } + } else { + diff = null; + } + return new FieldCapabilities(field, "unmapped", false, false, false, false, null, diff, null, null, null, null, Map.of()); + }; } return null; } + private static String nextIndex(Iterator iter, Set filtered) { + while (true) { + String index = iter.next(); + if (filtered.contains(index) == false) { + return index; + } + } + } + private static void innerMerge( String[] indices, Map> responseMapBuilder, diff --git a/server/src/main/java/org/elasticsearch/common/util/Maps.java b/server/src/main/java/org/elasticsearch/common/util/Maps.java index 5fcdde4e0b579..da5089983ceb5 100644 --- a/server/src/main/java/org/elasticsearch/common/util/Maps.java +++ b/server/src/main/java/org/elasticsearch/common/util/Maps.java @@ -151,9 +151,29 @@ public static boolean deepEquals(Map left, Map right) { if (left == null || right == null || left.size() != right.size()) { return false; } - return left.entrySet() - .stream() - .allMatch(e -> right.containsKey(e.getKey()) && Objects.deepEquals(e.getValue(), right.get(e.getKey()))); + + for (Map.Entry e : left.entrySet()) { + if (right.containsKey(e.getKey()) == false) { + return false; + } + + V v1 = e.getValue(); + V v2 = right.get(e.getKey()); + if (v1 instanceof Map && v2 instanceof Map) { + // if the values are both maps, then recursively compare them with Maps.deepEquals + @SuppressWarnings("unchecked") + Map m1 = (Map) v1; + @SuppressWarnings("unchecked") + Map m2 = (Map) v2; + if (Maps.deepEquals(m1, m2) == false) { + return false; + } + } else if (Objects.deepEquals(v1, v2) == false) { + return false; + } + } + + return true; } /** diff --git 
a/server/src/main/java/org/elasticsearch/index/IndexSettings.java b/server/src/main/java/org/elasticsearch/index/IndexSettings.java index 74d68276c1e3f..3a2b01f5cc9c4 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -283,7 +283,7 @@ public final class IndexSettings { TimeValue.MINUS_ONE, Property.NodeScope ); // TODO: remove setting - public static TimeValue STATELESS_DEFAULT_REFRESH_INTERVAL = TimeValue.timeValueSeconds(10); // TODO: settle on right value + public static TimeValue STATELESS_DEFAULT_REFRESH_INTERVAL = TimeValue.timeValueSeconds(15); // TODO: this value is still not final public static TimeValue STATELESS_MIN_NON_FAST_REFRESH_INTERVAL = TimeValue.timeValueSeconds(5); public static final Setting INDEX_REFRESH_INTERVAL_SETTING = Setting.timeSetting("index.refresh_interval", (settings) -> { if (EXISTING_SHARDS_ALLOCATOR_SETTING.get(settings).equals("stateless") && INDEX_FAST_REFRESH_SETTING.get(settings) == false) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index d19e0bc716a9a..512c35a146d0c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -564,4 +564,7 @@ public DynamicTemplate[] getAllDynamicTemplates() { return documentMapper().mapping().getRoot().dynamicTemplates(); } + public MapperRegistry getMapperRegistry() { + return mapperRegistry; + } } diff --git a/server/src/main/java/org/elasticsearch/index/query/FilteredSearchExecutionContext.java b/server/src/main/java/org/elasticsearch/index/query/FilteredSearchExecutionContext.java new file mode 100644 index 0000000000000..a966d94e8b72f --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/query/FilteredSearchExecutionContext.java @@ -0,0 +1,381 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.join.BitSetProducer; +import org.apache.lucene.search.similarities.Similarity; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.analysis.IndexAnalyzers; +import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.mapper.DocumentParsingException; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappingLookup; +import org.elasticsearch.index.mapper.NestedLookup; +import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.mapper.SourceLoader; +import org.elasticsearch.index.mapper.SourceToParse; +import org.elasticsearch.index.query.support.NestedScope; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.search.NestedDocuments; +import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry; +import org.elasticsearch.search.lookup.LeafFieldLookupProvider; +import org.elasticsearch.search.lookup.SearchLookup; +import org.elasticsearch.search.lookup.SourceProvider; +import org.elasticsearch.xcontent.XContentParserConfiguration; + +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.BiConsumer; +import java.util.function.Function; +import java.util.function.Predicate; + +/** + * This is NOT a simple clone of the SearchExecutionContext. + * While it does "clone-esque" things, it delegates everything it can to the passed search execution context. 
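+ * Subclasses override only the methods whose behavior must change and inherit delegation for everything else; the percolator, for example, overrides just fieldExistsInIndex (see wrapAllEmptyTextFields above).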
+ * + * Do NOT use this if you mean to clone the context as you are planning to make modifications + */ +public class FilteredSearchExecutionContext extends SearchExecutionContext { + private final SearchExecutionContext in; + + public FilteredSearchExecutionContext(SearchExecutionContext in) { + super(in); + this.in = in; + } + + @Override + public Similarity getSearchSimilarity() { + return in.getSearchSimilarity(); + } + + @Override + public Similarity getDefaultSimilarity() { + return in.getDefaultSimilarity(); + } + + @Override + public List defaultFields() { + return in.defaultFields(); + } + + @Override + public boolean queryStringLenient() { + return in.queryStringLenient(); + } + + @Override + public boolean queryStringAnalyzeWildcard() { + return in.queryStringAnalyzeWildcard(); + } + + @Override + public boolean queryStringAllowLeadingWildcard() { + return in.queryStringAllowLeadingWildcard(); + } + + @Override + public BitSetProducer bitsetFilter(Query filter) { + return in.bitsetFilter(filter); + } + + @Override + public > IFD getForField( + MappedFieldType fieldType, + MappedFieldType.FielddataOperation fielddataOperation + ) { + return in.getForField(fieldType, fielddataOperation); + } + + @Override + public void addNamedQuery(String name, Query query) { + in.addNamedQuery(name, query); + } + + @Override + public Map copyNamedQueries() { + return in.copyNamedQueries(); + } + + @Override + public ParsedDocument parseDocument(SourceToParse source) throws DocumentParsingException { + return in.parseDocument(source); + } + + @Override + public NestedLookup nestedLookup() { + return in.nestedLookup(); + } + + @Override + public boolean hasMappings() { + return in.hasMappings(); + } + + @Override + public boolean isFieldMapped(String name) { + return in.isFieldMapped(name); + } + + @Override + public boolean isMetadataField(String field) { + return in.isMetadataField(field); + } + + @Override + public boolean isMultiField(String field) { + return in.isMultiField(field); + } + + @Override + public Set sourcePath(String fullName) { + return in.sourcePath(fullName); + } + + @Override + public boolean isSourceEnabled() { + return in.isSourceEnabled(); + } + + @Override + public boolean isSourceSynthetic() { + return in.isSourceSynthetic(); + } + + @Override + public SourceLoader newSourceLoader(boolean forceSyntheticSource) { + return in.newSourceLoader(forceSyntheticSource); + } + + @Override + public MappedFieldType buildAnonymousFieldType(String type) { + return in.buildAnonymousFieldType(type); + } + + @Override + public Analyzer getIndexAnalyzer(Function unindexedFieldAnalyzer) { + return in.getIndexAnalyzer(unindexedFieldAnalyzer); + } + + @Override + public void setAllowedFields(Predicate allowedFields) { + in.setAllowedFields(allowedFields); + } + + @Override + public boolean containsBrokenAnalysis(String field) { + return in.containsBrokenAnalysis(field); + } + + @Override + public SearchLookup lookup() { + return in.lookup(); + } + + @Override + public void setLookupProviders( + SourceProvider sourceProvider, + Function fieldLookupProvider + ) { + in.setLookupProviders(sourceProvider, fieldLookupProvider); + } + + @Override + public NestedScope nestedScope() { + return in.nestedScope(); + } + + @Override + public IndexVersion indexVersionCreated() { + return in.indexVersionCreated(); + } + + @Override + public boolean indexSortedOnField(String field) { + return in.indexSortedOnField(field); + } + + @Override + public ParsedQuery toQuery(QueryBuilder queryBuilder) { + return 
in.toQuery(queryBuilder); + } + + @Override + public Index index() { + return in.index(); + } + + @Override + public FactoryType compile(Script script, ScriptContext context) { + return in.compile(script, context); + } + + @Override + public void disableCache() { + in.disableCache(); + } + + @Override + public void registerAsyncAction(BiConsumer> asyncAction) { + in.registerAsyncAction(asyncAction); + } + + @Override + @SuppressWarnings("rawtypes") + public void executeAsyncActions(ActionListener listener) { + in.executeAsyncActions(listener); + } + + @Override + public int getShardId() { + return in.getShardId(); + } + + @Override + public int getShardRequestIndex() { + return in.getShardRequestIndex(); + } + + @Override + public long nowInMillis() { + return in.nowInMillis(); + } + + @Override + public Client getClient() { + return in.getClient(); + } + + @Override + public IndexReader getIndexReader() { + return in.getIndexReader(); + } + + @Override + public IndexSearcher searcher() { + return in.searcher(); + } + + @Override + public boolean fieldExistsInIndex(String fieldname) { + return in.fieldExistsInIndex(fieldname); + } + + @Override + public MappingLookup.CacheKey mappingCacheKey() { + return in.mappingCacheKey(); + } + + @Override + public NestedDocuments getNestedDocuments() { + return in.getNestedDocuments(); + } + + @Override + public XContentParserConfiguration getParserConfig() { + return in.getParserConfig(); + } + + @Override + public CoordinatorRewriteContext convertToCoordinatorRewriteContext() { + return in.convertToCoordinatorRewriteContext(); + } + + @Override + public QueryRewriteContext convertToIndexMetadataContext() { + return in.convertToIndexMetadataContext(); + } + + @Override + public DataRewriteContext convertToDataRewriteContext() { + return in.convertToDataRewriteContext(); + } + + @Override + public MappedFieldType getFieldType(String name) { + return in.getFieldType(name); + } + + @Override + protected MappedFieldType fieldType(String name) { + return in.fieldType(name); + } + + @Override + public IndexAnalyzers getIndexAnalyzers() { + return in.getIndexAnalyzers(); + } + + @Override + MappedFieldType failIfFieldMappingNotFound(String name, MappedFieldType fieldMapping) { + return in.failIfFieldMappingNotFound(name, fieldMapping); + } + + @Override + public void setAllowUnmappedFields(boolean allowUnmappedFields) { + in.setAllowUnmappedFields(allowUnmappedFields); + } + + @Override + public void setMapUnmappedFieldAsString(boolean mapUnmappedFieldAsString) { + in.setMapUnmappedFieldAsString(mapUnmappedFieldAsString); + } + + @Override + public NamedWriteableRegistry getWriteableRegistry() { + return in.getWriteableRegistry(); + } + + @Override + public ValuesSourceRegistry getValuesSourceRegistry() { + return in.getValuesSourceRegistry(); + } + + @Override + public boolean allowExpensiveQueries() { + return in.allowExpensiveQueries(); + } + + @Override + public boolean hasAsyncActions() { + return in.hasAsyncActions(); + } + + @Override + public Index getFullyQualifiedIndex() { + return in.getFullyQualifiedIndex(); + } + + @Override + public IndexSettings getIndexSettings() { + return in.getIndexSettings(); + } + + @Override + public boolean indexMatches(String pattern) { + return in.indexMatches(pattern); + } + + @Override + public Set getMatchingFieldNames(String pattern) { + return in.getMatchingFieldNames(pattern); + } +} diff --git a/server/src/main/java/org/elasticsearch/transport/Header.java 
b/server/src/main/java/org/elasticsearch/transport/Header.java index 2c947e80e96d6..5daec4ff29722 100644 --- a/server/src/main/java/org/elasticsearch/transport/Header.java +++ b/server/src/main/java/org/elasticsearch/transport/Header.java @@ -49,7 +49,7 @@ long getRequestId() { return requestId; } - boolean isRequest() { + public boolean isRequest() { return TransportStatus.isRequest(status); } @@ -61,7 +61,7 @@ boolean isError() { return TransportStatus.isError(status); } - boolean isHandshake() { + public boolean isHandshake() { return TransportStatus.isHandshake(status); } @@ -77,6 +77,11 @@ public Compression.Scheme getCompressionScheme() { return compressionScheme; } + public Map getRequestHeaders() { + var allHeaders = getHeaders(); + return allHeaders == null ? null : allHeaders.v1(); + } + boolean needsToReadVariableHeader() { return headers == null; } diff --git a/server/src/main/java/org/elasticsearch/transport/HeaderValidationException.java b/server/src/main/java/org/elasticsearch/transport/HeaderValidationException.java new file mode 100644 index 0000000000000..03cf76e31ec08 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/transport/HeaderValidationException.java @@ -0,0 +1,22 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.transport; + +/** + * This is used to pack the validation exception with the associated header. + */ +public class HeaderValidationException extends RuntimeException { + public final Header header; + public final Exception validationException; + + public HeaderValidationException(Header header, Exception validationException) { + this.header = header; + this.validationException = validationException; + } +} diff --git a/server/src/main/java/org/elasticsearch/transport/InboundDecoder.java b/server/src/main/java/org/elasticsearch/transport/InboundDecoder.java index b0298dd2ec477..9643a41a42301 100644 --- a/server/src/main/java/org/elasticsearch/transport/InboundDecoder.java +++ b/server/src/main/java/org/elasticsearch/transport/InboundDecoder.java @@ -35,14 +35,20 @@ public class InboundDecoder implements Releasable { private boolean isCompressed = false; private boolean isClosed = false; private final ByteSizeValue maxHeaderSize; + private final ChannelType channelType; public InboundDecoder(Recycler recycler) { - this(recycler, new ByteSizeValue(2, ByteSizeUnit.GB)); + this(recycler, new ByteSizeValue(2, ByteSizeUnit.GB), ChannelType.MIX); } - public InboundDecoder(Recycler recycler, ByteSizeValue maxHeaderSize) { + public InboundDecoder(Recycler recycler, ChannelType channelType) { + this(recycler, new ByteSizeValue(2, ByteSizeUnit.GB), channelType); + } + + public InboundDecoder(Recycler recycler, ByteSizeValue maxHeaderSize, ChannelType channelType) { this.recycler = recycler; this.maxHeaderSize = maxHeaderSize; + this.channelType = channelType; } public int decode(ReleasableBytesReference reference, Consumer fragmentConsumer) throws IOException { @@ -70,7 +76,7 @@ public int internalDecode(ReleasableBytesReference reference, Consumer f } else { totalNetworkSize = messageLength + TcpHeader.BYTES_REQUIRED_FOR_MESSAGE_SIZE; - Header header = readHeader(messageLength, reference); + Header header = 
readHeader(messageLength, reference, channelType); bytesConsumed += headerBytesToRead; if (header.isCompressed()) { isCompressed = true; @@ -186,7 +192,7 @@ private static int headerBytesToRead(BytesReference reference, ByteSizeValue max } } - private static Header readHeader(int networkMessageSize, BytesReference bytesReference) throws IOException { + private static Header readHeader(int networkMessageSize, BytesReference bytesReference, ChannelType channelType) throws IOException { try (StreamInput streamInput = bytesReference.streamInput()) { streamInput.skip(TcpHeader.BYTES_REQUIRED_FOR_MESSAGE_SIZE); long requestId = streamInput.readLong(); @@ -194,6 +200,11 @@ private static Header readHeader(int networkMessageSize, BytesReference bytesRef int remoteVersion = streamInput.readInt(); Header header = new Header(networkMessageSize, requestId, status, TransportVersion.fromId(remoteVersion)); + if (channelType == ChannelType.SERVER && header.isResponse()) { + throw new IllegalArgumentException("server channels do not accept inbound responses, only requests, closing channel"); + } else if (channelType == ChannelType.CLIENT && header.isRequest()) { + throw new IllegalArgumentException("client channels do not accept inbound requests, only responses, closing channel"); + } if (header.isHandshake()) { checkHandshakeVersionCompatibility(header.getVersion()); } else { @@ -241,4 +252,10 @@ static void checkVersionCompatibility(TransportVersion remoteVersion) { ); } } + + public enum ChannelType { + SERVER, + CLIENT, + MIX + } } diff --git a/server/src/main/java/org/elasticsearch/transport/InboundPipeline.java b/server/src/main/java/org/elasticsearch/transport/InboundPipeline.java index 085c25ef08929..b5b3baaebd1ca 100644 --- a/server/src/main/java/org/elasticsearch/transport/InboundPipeline.java +++ b/server/src/main/java/org/elasticsearch/transport/InboundPipeline.java @@ -109,8 +109,7 @@ public void doHandleBytes(TcpChannel channel, ReleasableBytesReference reference private void forwardFragments(TcpChannel channel, ArrayList fragments) throws IOException { for (Object fragment : fragments) { if (fragment instanceof Header) { - assert aggregator.isAggregating() == false; - aggregator.headerReceived((Header) fragment); + headerReceived((Header) fragment); } else if (fragment instanceof Compression.Scheme) { assert aggregator.isAggregating(); aggregator.updateCompressionScheme((Compression.Scheme) fragment); @@ -134,6 +133,11 @@ private void forwardFragments(TcpChannel channel, ArrayList fragments) t } } + protected void headerReceived(Header header) { + assert aggregator.isAggregating() == false; + aggregator.headerReceived(header); + } + private static boolean endOfMessage(Object fragment) { return fragment == InboundDecoder.PING || fragment == InboundDecoder.END_CONTENT || fragment instanceof Exception; } diff --git a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java index 72c5a7aae005b..e433ae9a1de62 100644 --- a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java +++ b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java @@ -753,6 +753,23 @@ static void handleException(TcpChannel channel, Exception e, Lifecycle lifecycle logger.warn(() -> format("%s, [%s], closing connection", e.getMessage(), channel)); } else if (e instanceof TransportNotReadyException) { logger.debug(() -> format("%s on [%s], closing connection", e.getMessage(), channel)); + } else if (e instanceof 
HeaderValidationException headerValidationException) { + Header header = headerValidationException.header; + if (channel.isOpen()) { + try { + outboundHandler.sendErrorResponse( + header.getVersion(), + channel, + header.getRequestId(), + header.getActionName(), + ResponseStatsConsumer.NONE, + headerValidationException.validationException + ); + } catch (IOException inner) { + inner.addSuppressed(headerValidationException.validationException); + logger.warn(() -> "Failed to send error message back to client for validation failure", inner); + } + } } else { logger.warn(() -> "exception caught on transport layer [" + channel + "], closing connection", e); } diff --git a/server/src/test/java/org/elasticsearch/common/util/MapsTests.java b/server/src/test/java/org/elasticsearch/common/util/MapsTests.java index 532df29c00b70..525c04cae9957 100644 --- a/server/src/test/java/org/elasticsearch/common/util/MapsTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/MapsTests.java @@ -99,7 +99,31 @@ public void testOfEntries() { assertMapEntriesAndImmutability(map, entries); } - public void testDeepEquals() { + public void testDeepEqualsMapsWithSimpleValues() { + final Supplier keyGenerator = () -> randomAlphaOfLengthBetween(1, 5); + final Supplier valueGenerator = () -> randomInt(5); + final Map map = randomMap(randomInt(5), keyGenerator, valueGenerator); + final Map mapCopy = new HashMap<>(map); + + assertTrue(Maps.deepEquals(map, mapCopy)); + + final Map mapModified = mapCopy; + if (mapModified.isEmpty()) { + mapModified.put(keyGenerator.get(), valueGenerator.get()); + } else { + if (randomBoolean()) { + final String randomKey = mapModified.keySet().toArray(new String[0])[randomInt(mapModified.size() - 1)]; + final int value = mapModified.get(randomKey); + mapModified.put(randomKey, randomValueOtherThanMany((v) -> v.equals(value), valueGenerator)); + } else { + mapModified.put(randomValueOtherThanMany(mapModified::containsKey, keyGenerator), valueGenerator.get()); + } + } + + assertFalse(Maps.deepEquals(map, mapModified)); + } + + public void testDeepEqualsMapsWithArrayValues() { final Supplier keyGenerator = () -> randomAlphaOfLengthBetween(1, 5); final Supplier arrayValueGenerator = () -> random().ints(randomInt(5)).toArray(); final Map map = randomMap(randomInt(5), keyGenerator, arrayValueGenerator); @@ -125,6 +149,42 @@ public void testDeepEquals() { assertFalse(Maps.deepEquals(map, mapModified)); } + public void testDeepEqualsMapsWithMapValuesSimple() { + Map> m1 = Map.of("a", Map.of("b", new int[] { 1 })); + Map> m2 = Map.of("a", Map.of("b", new int[] { 1 })); + assertTrue(Maps.deepEquals(m1, m2)); + } + + public void testDeepEqualsMapsWithMapValues() { + final Supplier keyGenerator = () -> randomAlphaOfLengthBetween(1, 5); + final Supplier> mapValueGenerator = () -> Map.of("nested", random().ints(randomInt(5)).toArray()); + final Map> map = randomMap(randomInt(5), keyGenerator, mapValueGenerator); + final Map> mapCopy = map.entrySet().stream().collect(toMap(Map.Entry::getKey, e -> { + int[] value = e.getValue().get("nested"); + return Map.of("nested", Arrays.copyOf(value, value.length)); + })); + + assertTrue(Maps.deepEquals(map, mapCopy)); + + final Map> mapModified = mapCopy; + if (mapModified.isEmpty()) { + mapModified.put(keyGenerator.get(), mapValueGenerator.get()); + } else { + if (randomBoolean()) { + final String randomKey = mapModified.keySet().toArray(new String[0])[randomInt(mapModified.size() - 1)]; + final Map value = mapModified.get(randomKey); + mapModified.put( + 
randomKey, + randomValueOtherThanMany((v) -> Arrays.equals(v.get("nested"), value.get("nested")), mapValueGenerator) + ); + } else { + mapModified.put(randomValueOtherThanMany(mapModified::containsKey, keyGenerator), mapValueGenerator.get()); + } + } + + assertFalse(Maps.deepEquals(map, mapModified)); + } + public void testCollectToUnmodifiableSortedMap() { SortedMap canadianProvinces = Stream.of( new Tuple<>("ON", "Ontario"), diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregatorTests.java index 7b581881a7c71..c2aff7c6b82bb 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregatorTests.java @@ -12,7 +12,6 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; @@ -36,7 +35,7 @@ public class BucketsAggregatorTests extends AggregatorTestCase { private List toRelease = new ArrayList<>(); @Override - protected AggregationContext createAggregationContext(IndexSearcher indexSearcher, Query query, MappedFieldType... fieldTypes) + protected AggregationContext createAggregationContext(IndexReader indexSearcher, Query query, MappedFieldType... fieldTypes) throws IOException { AggregationContext context = super.createAggregationContext(indexSearcher, query, fieldTypes); // Generally, we should avoid doing this, but this test doesn't do anything with reduction, so it should be safe here @@ -59,10 +58,8 @@ public BucketsAggregator buildMergeAggregator() throws IOException { } try (IndexReader indexReader = DirectoryReader.open(directory)) { - IndexSearcher indexSearcher = newSearcher(indexReader); - AggregationContext context = createAggregationContext( - indexSearcher, + indexReader, null, new NumberFieldMapper.NumberFieldType("test", NumberFieldMapper.NumberType.INTEGER) ); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java index dfa4cfc8ce2f2..b3488f31ebcb4 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java @@ -24,7 +24,6 @@ import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.Term; import org.apache.lucene.search.FieldExistsQuery; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.Sort; @@ -3103,10 +3102,9 @@ public void testParentFactoryValidation() throws Exception { indexWriter.addDocument(document); } try (DirectoryReader indexReader = DirectoryReader.open(directory)) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); try ( AggregationContext context = createAggregationContext( - indexSearcher, + indexReader, new MatchAllDocsQuery(), keywordField("term-field"), longField("time") @@ -3680,10 +3678,9 @@ private void execu } } try 
(DirectoryReader indexReader = DirectoryReader.open(directory)) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); for (int i = 0; i < create.size(); i++) { verify.get(i) - .accept(searchAndReduce(indexSearcher, new AggTestConfig(create.get(i).get(), FIELD_TYPES).withQuery(query))); + .accept(searchAndReduce(indexReader, new AggTestConfig(create.get(i).get(), FIELD_TYPES).withQuery(query))); } } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java index 8ea8995ccc0d0..63b69cc86c8da 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java @@ -106,7 +106,6 @@ public void testEmpty() throws Exception { RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); indexWriter.close(); DirectoryReader indexReader = DirectoryReader.open(directory); - IndexSearcher indexSearcher = newIndexSearcher(indexReader); int numFilters = randomIntBetween(1, 10); QueryBuilder[] filters = new QueryBuilder[numFilters]; for (int i = 0; i < filters.length; i++) { @@ -114,7 +113,7 @@ public void testEmpty() throws Exception { } FiltersAggregationBuilder builder = new FiltersAggregationBuilder("test", filters); builder.otherBucketKey("other"); - InternalFilters response = searchAndReduce(indexSearcher, new AggTestConfig(builder, new KeywordFieldType("field"))); + InternalFilters response = searchAndReduce(indexReader, new AggTestConfig(builder, new KeywordFieldType("field"))); assertEquals(response.getBuckets().size(), numFilters); for (InternalFilters.InternalBucket filter : response.getBuckets()) { assertEquals(filter.getDocCount(), 0); @@ -198,7 +197,6 @@ public void testKeyedFilter() throws Exception { indexWriter.close(); DirectoryReader indexReader = DirectoryReader.open(directory); - IndexSearcher indexSearcher = newIndexSearcher(indexReader); FiltersAggregator.KeyedFilter[] keys = new FiltersAggregator.KeyedFilter[6]; keys[0] = new FiltersAggregator.KeyedFilter("foobar", QueryBuilders.termQuery("field", "foobar")); @@ -211,7 +209,7 @@ public void testKeyedFilter() throws Exception { FiltersAggregationBuilder builder = new FiltersAggregationBuilder("test", keys); builder.otherBucket(true); builder.otherBucketKey("other"); - final InternalFilters filters = searchAndReduce(indexSearcher, new AggTestConfig(builder, new KeywordFieldType("field"))); + final InternalFilters filters = searchAndReduce(indexReader, new AggTestConfig(builder, new KeywordFieldType("field"))); assertEquals(filters.getBuckets().size(), 7); assertEquals(filters.getBucketByKey("foobar").getDocCount(), 2); assertEquals(filters.getBucketByKey("foo").getDocCount(), 2); @@ -246,7 +244,6 @@ public void testRandom() throws Exception { indexWriter.close(); DirectoryReader indexReader = DirectoryReader.open(directory); - IndexSearcher indexSearcher = newIndexSearcher(indexReader); try { int numFilters = randomIntBetween(1, 10); QueryBuilder[] filters = new QueryBuilder[numFilters]; @@ -266,7 +263,7 @@ public void testRandom() throws Exception { builder.otherBucket(true); builder.otherBucketKey("other"); - final InternalFilters response = searchAndReduce(indexSearcher, new AggTestConfig(builder, new KeywordFieldType("field"))); + final InternalFilters response = searchAndReduce(indexReader, new 
AggTestConfig(builder, new KeywordFieldType("field"))); List buckets = response.getBuckets(); assertEquals(buckets.size(), filters.length + 1); @@ -420,6 +417,7 @@ public void testRangeFilter() throws IOException { ) ); }, + null, ft ); }); @@ -441,8 +439,8 @@ public void testPhraseFilter() throws IOException { List.of(new Field("test", "will find me", TextFieldMapper.Defaults.FIELD_TYPE)) ) ); - }, searcher -> { - searcher.setQueryCachingPolicy(new QueryCachingPolicy() { + }, indexReader -> { + QueryCachingPolicy queryCachingPolicy = new QueryCachingPolicy() { @Override public boolean shouldCache(Query query) throws IOException { return true; @@ -450,12 +448,12 @@ public boolean shouldCache(Query query) throws IOException { @Override public void onUse(Query query) {} - }); + }; for (Matcher segmentsCountedInConstantTime : List.of(equalTo(0), greaterThanOrEqualTo(1))) { debugTestCase( builder, new MatchAllDocsQuery(), - searcher, + indexReader, (InternalFilters filters, Class impl, Map> debug) -> { assertThat(filters.getBuckets(), hasSize(1)); assertThat(filters.getBucketByKey("q1").getDocCount(), equalTo(1L)); @@ -479,6 +477,7 @@ public void onUse(Query query) {} ) ); }, + queryCachingPolicy, ft ); } @@ -662,7 +661,7 @@ public void onCache(ShardId shardId, Accountable accountable) {} bitsetFilterCache, LongPoint.newRangeQuery("t", 5, Long.MAX_VALUE) ); - IndexSearcher searcher = newIndexSearcher(limitedReader); + IndexSearcher searcher = newSearcher(limitedReader); int segmentsWithLiveDocs = (int) searcher.getIndexReader() .leaves() .stream() @@ -672,7 +671,7 @@ public void onCache(ShardId shardId, Accountable accountable) {} debugTestCase( builder, new MatchAllDocsQuery(), - searcher, + limitedReader, (InternalFilters filters, Class impl, Map> debug) -> { assertThat(filters.getBuckets(), hasSize(1)); assertThat(filters.getBucketByKey("q1").getDocCount(), equalTo(5L)); @@ -693,7 +692,8 @@ public void onCache(ShardId shardId, Accountable accountable) {} ) ) ); - } + }, + null ); } } @@ -735,7 +735,7 @@ public void onCache(ShardId shardId, Accountable accountable) {} bitsetFilterCache, LongPoint.newRangeQuery("t", 5, Long.MAX_VALUE) ); - IndexSearcher searcher = newIndexSearcher(limitedReader); + IndexSearcher searcher = newSearcher(limitedReader); int segmentsWithLiveDocs = (int) searcher.getIndexReader() .leaves() .stream() @@ -745,7 +745,7 @@ public void onCache(ShardId shardId, Accountable accountable) {} debugTestCase( builder, new MatchAllDocsQuery(), - searcher, + limitedReader, (InternalFilters filters, Class impl, Map> debug) -> { assertThat(filters.getBuckets(), hasSize(1)); assertThat(filters.getBucketByKey("q1").getDocCount(), equalTo(5L)); @@ -767,6 +767,7 @@ public void onCache(ShardId shardId, Accountable accountable) {} ) ); }, + null, ft ); } @@ -805,7 +806,7 @@ public void onCache(ShardId shardId, Accountable accountable) {} bitsetFilterCache, LongPoint.newRangeQuery("t", Long.MIN_VALUE, Long.MAX_VALUE) ); - IndexSearcher searcher = newIndexSearcher(limitedReader); + IndexSearcher searcher = newSearcher(limitedReader); int segmentsWithLiveDocs = (int) searcher.getIndexReader() .leaves() .stream() @@ -816,7 +817,7 @@ public void onCache(ShardId shardId, Accountable accountable) {} debugTestCase( builder, new MatchAllDocsQuery(), - searcher, + limitedReader, (InternalFilters filters, Class impl, Map> debug) -> { assertThat(filters.getBuckets(), hasSize(1)); assertThat(filters.getBucketByKey("q1").getDocCount(), equalTo(10L)); @@ -838,6 +839,7 @@ public void onCache(ShardId 
shardId, Accountable accountable) {} ) ); }, + null, ft ); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java index 2b70e906ebe11..0c08324994bc7 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java @@ -16,7 +16,6 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; @@ -1052,7 +1051,7 @@ private void aggregationImplementationChoiceTestCase( } try ( IndexReader reader = indexWriter.getReader(); - AggregationContext context = createAggregationContext(newSearcher(reader), new MatchAllDocsQuery(), ft) + AggregationContext context = createAggregationContext(reader, new MatchAllDocsQuery(), ft) ) { Aggregator agg = createAggregator(builder, context); Matcher matcher = instanceOf(DateHistogramAggregator.FromDateRange.class); @@ -1171,15 +1170,13 @@ private void testSearchCase( } try (DirectoryReader indexReader = DirectoryReader.open(directory)) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); - DateHistogramAggregationBuilder aggregationBuilder = new DateHistogramAggregationBuilder("_name"); if (configure != null) { configure.accept(aggregationBuilder); } InternalDateHistogram histogram = searchAndReduce( - indexSearcher, + indexReader, new AggTestConfig(aggregationBuilder, fieldType).withMaxBuckets(maxBucket).withQuery(query) ); verify.accept(histogram); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregatorTests.java index 92d91152948ea..d525023a21360 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregatorTests.java @@ -12,7 +12,6 @@ import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; @@ -127,8 +126,7 @@ public void testUnsupportedRangeType() throws Exception { MappedFieldType fieldType = new RangeFieldMapper.RangeFieldType(fieldName, rangeType); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - expectThrows(IllegalArgumentException.class, () -> searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType))); + expectThrows(IllegalArgumentException.class, () -> searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType))); } } } @@ -1082,10 +1080,8 @@ private void testCase( indexWriter.close(); try (DirectoryReader indexReader = DirectoryReader.open(directory)) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); - InternalDateHistogram histogram = searchAndReduce( - 
indexSearcher, + indexReader, new AggTestConfig(aggregationBuilder, fieldType).withQuery(query) ); verify.accept(histogram); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/NumericHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/NumericHistogramAggregatorTests.java index 53b5a51ce0b69..0af4a8a7ca54d 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/NumericHistogramAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/NumericHistogramAggregatorTests.java @@ -13,7 +13,6 @@ import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexableField; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.util.BytesRef; @@ -50,8 +49,7 @@ public void testLongs() throws Exception { HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field").interval(5); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new AggTestConfig(aggBuilder, longField("field"))); + InternalHistogram histogram = searchAndReduce(reader, new AggTestConfig(aggBuilder, longField("field"))); assertEquals(6, histogram.getBuckets().size()); assertEquals(-10d, histogram.getBuckets().get(0).getKey()); assertEquals(2, histogram.getBuckets().get(0).getDocCount()); @@ -80,8 +78,7 @@ public void testDoubles() throws Exception { HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field").interval(5); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new AggTestConfig(aggBuilder, doubleField("field"))); + InternalHistogram histogram = searchAndReduce(reader, new AggTestConfig(aggBuilder, doubleField("field"))); assertEquals(6, histogram.getBuckets().size()); assertEquals(6, histogram.getBuckets().size()); assertEquals(-10d, histogram.getBuckets().get(0).getKey()); @@ -130,8 +127,7 @@ public void testDates() throws Exception { HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field(fieldName) .interval(1000 * 60 * 60 * 24); try (IndexReader reader = indexWriter.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalHistogram histogram = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertTrue(AggregationInspectionHelper.hasValue(histogram)); } } @@ -147,8 +143,7 @@ public void testIrrationalInterval() throws Exception { HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field").interval(Math.PI); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new AggTestConfig(aggBuilder, longField("field"))); + InternalHistogram histogram = searchAndReduce(reader, new AggTestConfig(aggBuilder, longField("field"))); assertEquals(6, histogram.getBuckets().size()); assertEquals(-4 * Math.PI, histogram.getBuckets().get(0).getKey()); assertEquals(1, histogram.getBuckets().get(0).getDocCount()); @@ -177,8 +172,7 @@ 
public void testMinDocCount() throws Exception { HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field").interval(10).minDocCount(2); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new AggTestConfig(aggBuilder, longField("field"))); + InternalHistogram histogram = searchAndReduce(reader, new AggTestConfig(aggBuilder, longField("field"))); assertEquals(2, histogram.getBuckets().size()); assertEquals(-10d, histogram.getBuckets().get(0).getKey()); assertEquals(2, histogram.getBuckets().get(0).getDocCount()); @@ -200,8 +194,7 @@ public void testMissing() throws Exception { HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field").interval(5).missing(2d); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new AggTestConfig(aggBuilder, longField("field"))); + InternalHistogram histogram = searchAndReduce(reader, new AggTestConfig(aggBuilder, longField("field"))); assertEquals(6, histogram.getBuckets().size()); assertEquals(-10d, histogram.getBuckets().get(0).getKey()); assertEquals(2, histogram.getBuckets().get(0).getDocCount()); @@ -229,8 +222,7 @@ public void testMissingUnmappedField() throws Exception { HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field").interval(5).missing(2d); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new AggTestConfig(aggBuilder)); + InternalHistogram histogram = searchAndReduce(reader, new AggTestConfig(aggBuilder)); assertEquals(1, histogram.getBuckets().size()); @@ -253,10 +245,9 @@ public void testMissingUnmappedFieldBadType() throws Exception { .interval(5) .missing(missingValue); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); Throwable t = expectThrows( IllegalArgumentException.class, - () -> { searchAndReduce(searcher, new AggTestConfig(aggBuilder)); } + () -> { searchAndReduce(reader, new AggTestConfig(aggBuilder)); } ); // This throws a number format exception (which is a subclass of IllegalArgumentException) and might be ok? 
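// Editor's note on the comment above: NumberFormatException does extend
// IllegalArgumentException, so the expectThrows(IllegalArgumentException.class, ...)
// call also catches the parse failure of the badly typed `missing` value. A minimal
// illustration of the relationship (hypothetical values, not part of this test):
//     try {
//         Long.parseLong("not-a-number"); // throws NumberFormatException
//     } catch (IllegalArgumentException e) {
//         assert e instanceof NumberFormatException;
//     }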
assertThat(t.getMessage(), containsString(missingValue)); @@ -274,11 +265,10 @@ public void testIncorrectFieldType() throws Exception { HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field").interval(5); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); expectThrows( IllegalArgumentException.class, - () -> { searchAndReduce(searcher, new AggTestConfig(aggBuilder, keywordField("field"))); } + () -> { searchAndReduce(reader, new AggTestConfig(aggBuilder, keywordField("field"))); } ); } } @@ -295,8 +285,7 @@ public void testOffset() throws Exception { HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field").interval(5).offset(Math.PI); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new AggTestConfig(aggBuilder, doubleField("field"))); + InternalHistogram histogram = searchAndReduce(reader, new AggTestConfig(aggBuilder, doubleField("field"))); assertEquals(4, histogram.getBuckets().size()); assertEquals(-10 + Math.PI, histogram.getBuckets().get(0).getKey()); assertEquals(2, histogram.getBuckets().get(0).getDocCount()); @@ -327,8 +316,7 @@ public void testRandomOffset() throws Exception { .interval(interval) .offset(offset); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new AggTestConfig(aggBuilder, doubleField("field"))); + InternalHistogram histogram = searchAndReduce(reader, new AggTestConfig(aggBuilder, doubleField("field"))); assertEquals(4, histogram.getBuckets().size()); assertEquals(-10 + expectedOffset, histogram.getBuckets().get(0).getKey()); @@ -361,8 +349,7 @@ public void testExtendedBounds() throws Exception { .extendedBounds(-12, 13); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new AggTestConfig(aggBuilder, doubleField("field"))); + InternalHistogram histogram = searchAndReduce(reader, new AggTestConfig(aggBuilder, doubleField("field"))); assertEquals(6, histogram.getBuckets().size()); assertEquals(-15d, histogram.getBuckets().get(0).getKey()); assertEquals(0, histogram.getBuckets().get(0).getDocCount()); @@ -394,8 +381,7 @@ public void testHardBounds() throws Exception { .hardBounds(new DoubleBounds(0.0, 10.0)); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalHistogram histogram = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertEquals(1, histogram.getBuckets().size()); assertEquals(0d, histogram.getBuckets().get(0).getKey()); assertEquals(2, histogram.getBuckets().get(0).getDocCount()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/RangeHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/RangeHistogramAggregatorTests.java index d9f55d066ba2a..95661fd24c49e 100644 --- 
a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/RangeHistogramAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/RangeHistogramAggregatorTests.java @@ -13,7 +13,6 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexableField; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.util.BytesRef; @@ -55,8 +54,7 @@ public void testDoubles() throws Exception { HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field").interval(5); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new AggTestConfig(aggBuilder, rangeField("field", rangeType))); + InternalHistogram histogram = searchAndReduce(reader, new AggTestConfig(aggBuilder, rangeField("field", rangeType))); assertEquals(7, histogram.getBuckets().size()); assertEquals(-5d, histogram.getBuckets().get(0).getKey()); @@ -101,8 +99,7 @@ public void testLongs() throws Exception { HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field").interval(5); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new AggTestConfig(aggBuilder, rangeField("field", rangeType))); + InternalHistogram histogram = searchAndReduce(reader, new AggTestConfig(aggBuilder, rangeField("field", rangeType))); assertEquals(7, histogram.getBuckets().size()); assertEquals(-5d, histogram.getBuckets().get(0).getKey()); @@ -147,8 +144,7 @@ public void testMultipleRanges() throws Exception { HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field").interval(5); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new AggTestConfig(aggBuilder, rangeField("field", rangeType))); + InternalHistogram histogram = searchAndReduce(reader, new AggTestConfig(aggBuilder, rangeField("field", rangeType))); assertEquals(7, histogram.getBuckets().size()); assertEquals(-5d, histogram.getBuckets().get(0).getKey()); @@ -194,8 +190,7 @@ public void testMultipleRangesLotsOfOverlap() throws Exception { HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field").interval(5); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new AggTestConfig(aggBuilder, rangeField("field", rangeType))); + InternalHistogram histogram = searchAndReduce(reader, new AggTestConfig(aggBuilder, rangeField("field", rangeType))); assertEquals(3, histogram.getBuckets().size()); assertEquals(0d, histogram.getBuckets().get(0).getKey()); @@ -228,8 +223,7 @@ public void testLongsIrrationalInterval() throws Exception { HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field").interval(Math.PI); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new AggTestConfig(aggBuilder, rangeField("field", rangeType))); + InternalHistogram histogram = searchAndReduce(reader, new 
AggTestConfig(aggBuilder, rangeField("field", rangeType))); assertEquals(6, histogram.getBuckets().size()); assertEquals(-1 * Math.PI, histogram.getBuckets().get(0).getKey()); @@ -271,8 +265,7 @@ public void testMinDocCount() throws Exception { HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field").interval(5).minDocCount(2); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new AggTestConfig(aggBuilder, rangeField("field", rangeType))); + InternalHistogram histogram = searchAndReduce(reader, new AggTestConfig(aggBuilder, rangeField("field", rangeType))); assertEquals(2, histogram.getBuckets().size()); assertEquals(5d, histogram.getBuckets().get(0).getKey()); @@ -302,8 +295,7 @@ public void testOffset() throws Exception { HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field").interval(5).offset(4); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new AggTestConfig(aggBuilder, rangeField("field", rangeType))); + InternalHistogram histogram = searchAndReduce(reader, new AggTestConfig(aggBuilder, rangeField("field", rangeType))); assertEquals(8, histogram.getBuckets().size()); assertEquals(-6d, histogram.getBuckets().get(0).getKey()); @@ -359,8 +351,7 @@ public void testOffsetGtInterval() throws Exception { .offset(offset); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new AggTestConfig(aggBuilder, rangeField("field", rangeType))); + InternalHistogram histogram = searchAndReduce(reader, new AggTestConfig(aggBuilder, rangeField("field", rangeType))); assertEquals(7, histogram.getBuckets().size()); assertEquals(-5d + expectedOffset, histogram.getBuckets().get(0).getKey()); @@ -408,9 +399,8 @@ public void testIpRangesUnsupported() throws Exception { HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field").interval(5); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); Exception e = expectThrows(IllegalArgumentException.class, () -> { - searchAndReduce(searcher, new AggTestConfig(aggBuilder, rangeField("field", rangeType))); + searchAndReduce(reader, new AggTestConfig(aggBuilder, rangeField("field", rangeType))); }); assertThat(e.getMessage(), equalTo("Expected numeric range type but found non-numeric range [ip_range]")); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorTests.java index 32530e440f387..bbeeb855f8d18 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorTests.java @@ -11,7 +11,6 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; @@ -609,8 +608,6 @@ private void 
testSearchCase( } try (DirectoryReader indexReader = DirectoryReader.open(directory)) { - final IndexSearcher indexSearcher = newIndexSearcher(indexReader); - final VariableWidthHistogramAggregationBuilder aggregationBuilder = new VariableWidthHistogramAggregationBuilder("_name"); if (configure != null) { configure.accept(aggregationBuilder); @@ -628,7 +625,7 @@ private void testSearchCase( } final InternalVariableWidthHistogram histogram = searchAndReduce( - indexSearcher, + indexReader, new AggTestConfig(aggregationBuilder, fieldType).withQuery(query) ); verify.accept(histogram); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorTests.java index 8d1c7c3088de3..0f9cfddef8a5c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorTests.java @@ -12,7 +12,6 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexableField; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; @@ -321,12 +320,8 @@ private void testCase( } try (DirectoryReader indexReader = DirectoryReader.open(directory)) { - final IndexSearcher indexSearcher = newIndexSearcher(indexReader); final MappedFieldType[] fieldTypesArray = fieldTypes.toArray(new MappedFieldType[0]); - final InternalMissing missing = searchAndReduce( - indexSearcher, - new AggTestConfig(builder, fieldTypesArray).withQuery(query) - ); + final InternalMissing missing = searchAndReduce(indexReader, new AggTestConfig(builder, fieldTypesArray).withQuery(query)); verify.accept(missing); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java index 6324d40525cf6..62463dd9a2548 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java @@ -146,7 +146,7 @@ public void testNoDocs() throws IOException { nestedBuilder.subAggregation(maxAgg); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); - InternalNested nested = searchAndReduce(newIndexSearcher(indexReader), new AggTestConfig(nestedBuilder, fieldType)); + InternalNested nested = searchAndReduce(indexReader, new AggTestConfig(nestedBuilder, fieldType)); assertEquals(NESTED_AGG, nested.getName()); assertEquals(0, nested.getDocCount()); @@ -189,7 +189,7 @@ public void testSingleNestingMax() throws IOException { nestedBuilder.subAggregation(maxAgg); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); - InternalNested nested = searchAndReduce(newIndexSearcher(indexReader), new AggTestConfig(nestedBuilder, fieldType)); + InternalNested nested = searchAndReduce(indexReader, new AggTestConfig(nestedBuilder, fieldType)); assertEquals(expectedNestedDocs, nested.getDocCount()); assertEquals(NESTED_AGG, nested.getName()); @@ -239,7 +239,7 
@@ public void testDoubleNestingMax() throws IOException { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); - InternalNested nested = searchAndReduce(newIndexSearcher(indexReader), new AggTestConfig(nestedBuilder, fieldType)); + InternalNested nested = searchAndReduce(indexReader, new AggTestConfig(nestedBuilder, fieldType)); assertEquals(expectedNestedDocs, nested.getDocCount()); assertEquals(NESTED_AGG, nested.getName()); @@ -290,7 +290,7 @@ public void testOrphanedDocs() throws IOException { nestedBuilder.subAggregation(sumAgg); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); - InternalNested nested = searchAndReduce(newIndexSearcher(indexReader), new AggTestConfig(nestedBuilder, fieldType)); + InternalNested nested = searchAndReduce(indexReader, new AggTestConfig(nestedBuilder, fieldType)); assertEquals(expectedNestedDocs, nested.getDocCount()); assertEquals(NESTED_AGG, nested.getName()); @@ -371,7 +371,7 @@ public void testResetRootDocId() throws Exception { bq.add(new TermQuery(new Term(IdFieldMapper.NAME, Uid.encodeId("2"))), BooleanClause.Occur.MUST_NOT); InternalNested nested = searchAndReduce( - newIndexSearcher(indexReader), + indexReader, new AggTestConfig(nestedBuilder, fieldType).withQuery(new ConstantScoreQuery(bq.build())) ); @@ -410,7 +410,7 @@ public void testNestedOrdering() throws IOException { nestedBuilder.subAggregation(maxAgg); termsBuilder.subAggregation(nestedBuilder); - Terms terms = searchAndReduce(newIndexSearcher(indexReader), new AggTestConfig(termsBuilder, fieldType1, fieldType2)); + Terms terms = searchAndReduce(indexReader, new AggTestConfig(termsBuilder, fieldType1, fieldType2)); assertEquals(7, terms.getBuckets().size()); assertEquals("authors", terms.getName()); @@ -459,7 +459,7 @@ public void testNestedOrdering() throws IOException { nestedBuilder.subAggregation(maxAgg); termsBuilder.subAggregation(nestedBuilder); - terms = searchAndReduce(newIndexSearcher(indexReader), new AggTestConfig(termsBuilder, fieldType1, fieldType2)); + terms = searchAndReduce(indexReader, new AggTestConfig(termsBuilder, fieldType1, fieldType2)); assertEquals(7, terms.getBuckets().size()); assertEquals("authors", terms.getName()); @@ -546,7 +546,7 @@ public void testNestedOrdering_random() throws IOException { termsBuilder.subAggregation(nestedBuilder); AggTestConfig aggTestConfig = new AggTestConfig(termsBuilder, fieldType1, fieldType2); - Terms terms = searchAndReduce(newIndexSearcher(indexReader), aggTestConfig); + Terms terms = searchAndReduce(indexReader, aggTestConfig); assertEquals(books.size(), terms.getBuckets().size()); assertEquals("authors", terms.getName()); @@ -642,7 +642,7 @@ public void testPreGetChildLeafCollectors() throws IOException { MappedFieldType fieldType2 = new KeywordFieldMapper.KeywordFieldType("value"); Filter filter = searchAndReduce( - newIndexSearcher(indexReader), + indexReader, new AggTestConfig(filterAggregationBuilder, fieldType1, fieldType2).withQuery( Queries.newNonNestedFilter(IndexVersion.current()) ) @@ -705,8 +705,8 @@ public void testFieldAlias() throws IOException { max(MAX_AGG_NAME).field(VALUE_FIELD_NAME + "-alias") ); - InternalNested nested = searchAndReduce(newIndexSearcher(indexReader), new AggTestConfig(agg, fieldType)); - Nested aliasNested = searchAndReduce(newIndexSearcher(indexReader), new AggTestConfig(aliasAgg, fieldType)); + InternalNested nested = searchAndReduce(indexReader, 
new AggTestConfig(agg, fieldType)); + Nested aliasNested = searchAndReduce(indexReader, new AggTestConfig(aliasAgg, fieldType)); assertEquals(nested, aliasNested); assertEquals(expectedNestedDocs, nested.getDocCount()); @@ -754,7 +754,7 @@ public void testNestedWithPipeline() throws IOException { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); - InternalNested nested = searchAndReduce(newIndexSearcher(indexReader), new AggTestConfig(nestedBuilder, fieldType)); + InternalNested nested = searchAndReduce(indexReader, new AggTestConfig(nestedBuilder, fieldType)); assertEquals(expectedNestedDocs, nested.getDocCount()); assertEquals(NESTED_AGG, nested.getName()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java index a34aa91d5d77d..9c908f13d90bc 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java @@ -72,7 +72,7 @@ public void testNoDocs() throws IOException { reverseNestedBuilder.subAggregation(maxAgg); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); - Nested nested = searchAndReduce(newIndexSearcher(indexReader), new AggTestConfig(nestedBuilder, fieldType)); + Nested nested = searchAndReduce(indexReader, new AggTestConfig(nestedBuilder, fieldType)); ReverseNested reverseNested = (ReverseNested) ((InternalAggregation) nested).getProperty(REVERSE_AGG_NAME); assertEquals(REVERSE_AGG_NAME, reverseNested.getName()); assertEquals(0, reverseNested.getDocCount()); @@ -125,7 +125,7 @@ public void testMaxFromParentDocs() throws IOException { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); AggTestConfig aggTestConfig = new AggTestConfig(nestedBuilder, fieldType); - Nested nested = searchAndReduce(newIndexSearcher(indexReader), aggTestConfig); + Nested nested = searchAndReduce(indexReader, aggTestConfig); assertEquals(expectedNestedDocs, nested.getDocCount()); ReverseNested reverseNested = (ReverseNested) ((InternalAggregation) nested).getProperty(REVERSE_AGG_NAME); @@ -184,8 +184,8 @@ public void testFieldAlias() throws IOException { reverseNested(REVERSE_AGG_NAME).subAggregation(aliasMaxAgg) ); - Nested nested = searchAndReduce(newIndexSearcher(indexReader), new AggTestConfig(agg, fieldType)); - Nested aliasNested = searchAndReduce(newIndexSearcher(indexReader), new AggTestConfig(aliasAgg, fieldType)); + Nested nested = searchAndReduce(indexReader, new AggTestConfig(agg, fieldType)); + Nested aliasNested = searchAndReduce(indexReader, new AggTestConfig(aliasAgg, fieldType)); ReverseNested reverseNested = nested.getAggregations().get(REVERSE_AGG_NAME); ReverseNested aliasReverseNested = aliasNested.getAggregations().get(REVERSE_AGG_NAME); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregatorTests.java index fbc6575e8f7f0..f0844328609ed 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregatorTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregatorTests.java @@ -15,7 +15,6 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; @@ -520,10 +519,8 @@ private void testCase( indexWriter.close(); try (DirectoryReader indexReader = DirectoryReader.open(directory)) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); - InternalRange> agg = searchAndReduce( - indexSearcher, + indexReader, new AggTestConfig(aggregationBuilder, fieldType).withQuery(query) ); verify.accept(agg); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/IpRangeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/IpRangeAggregatorTests.java index b678b760c04d7..df356e2b51599 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/IpRangeAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/IpRangeAggregatorTests.java @@ -11,7 +11,6 @@ import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.util.BytesRef; @@ -98,8 +97,7 @@ public void testRanges() throws Exception { } MappedFieldType fieldType = new IpFieldMapper.IpFieldType("field"); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalBinaryRange range = searchAndReduce(searcher, new AggTestConfig(builder, fieldType)); + InternalBinaryRange range = searchAndReduce(reader, new AggTestConfig(builder, fieldType)); assertEquals(numRanges, range.getBuckets().size()); for (int i = 0; i < range.getBuckets().size(); i++) { Tuple expected = requestedRanges[i]; @@ -131,8 +129,7 @@ public void testMissingUnmapped() throws Exception { .addRange(new IpRangeAggregationBuilder.Range("foo", "192.168.100.0", "192.168.100.255")) .missing("192.168.100.42"); // Apparently we expect a string here try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalBinaryRange range = searchAndReduce(searcher, new AggTestConfig(builder)); + InternalBinaryRange range = searchAndReduce(reader, new AggTestConfig(builder)); assertEquals(1, range.getBuckets().size()); } } @@ -149,8 +146,7 @@ public void testMissingUnmappedBadType() throws Exception { .addRange(new IpRangeAggregationBuilder.Range("foo", "192.168.100.0", "192.168.100.255")) .missing(1234); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - expectThrows(IllegalArgumentException.class, () -> { searchAndReduce(searcher, new AggTestConfig(builder)); }); + expectThrows(IllegalArgumentException.class, () -> { searchAndReduce(reader, new AggTestConfig(builder)); }); } } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java index 1f8986cb50dbf..797ace3f2b37c 100644 --- 
a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java @@ -16,7 +16,6 @@ import org.apache.lucene.document.TextField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; @@ -91,23 +90,22 @@ public void testDiversifiedSampler() throws Exception { writeBooks(indexWriter); indexWriter.close(); DirectoryReader indexReader = DirectoryReader.open(directory); - IndexSearcher indexSearcher = newIndexSearcher(indexReader); Consumer verify = result -> { Terms terms = result.getAggregations().get("terms"); assertEquals(2, terms.getBuckets().size()); assertEquals("0805080481", terms.getBuckets().get(0).getKeyAsString()); assertEquals("0812550706", terms.getBuckets().get(1).getKeyAsString()); }; - testCase(indexSearcher, genreFieldType, "map", verify); - testCase(indexSearcher, genreFieldType, "global_ordinals", verify); - testCase(indexSearcher, genreFieldType, "bytes_hash", verify); + testCase(indexReader, genreFieldType, "map", verify); + testCase(indexReader, genreFieldType, "global_ordinals", verify); + testCase(indexReader, genreFieldType, "bytes_hash", verify); genreFieldType = new NumberFieldMapper.NumberFieldType("genre_id", NumberFieldMapper.NumberType.LONG); - testCase(indexSearcher, genreFieldType, null, verify); + testCase(indexReader, genreFieldType, null, verify); // wrong field: genreFieldType = new KeywordFieldMapper.KeywordFieldType("wrong_field"); - testCase(indexSearcher, genreFieldType, null, result -> { + testCase(indexReader, genreFieldType, null, result -> { Terms terms = result.getAggregations().get("terms"); assertEquals(1, terms.getBuckets().size()); assertEquals("0805080481", terms.getBuckets().get(0).getKeyAsString()); @@ -123,7 +121,6 @@ public void testRidiculousSize() throws Exception { writeBooks(indexWriter); indexWriter.close(); DirectoryReader indexReader = DirectoryReader.open(directory); - IndexSearcher indexSearcher = newIndexSearcher(indexReader); MappedFieldType genreFieldType = new KeywordFieldMapper.KeywordFieldType("genre"); Consumer verify = result -> { @@ -133,31 +130,27 @@ public void testRidiculousSize() throws Exception { try { // huge shard_size - testCase(indexSearcher, genreFieldType, "map", verify, Integer.MAX_VALUE, 1); - testCase(indexSearcher, genreFieldType, "global_ordinals", verify, Integer.MAX_VALUE, 1); - testCase(indexSearcher, genreFieldType, "bytes_hash", verify, Integer.MAX_VALUE, 1); + testCase(indexReader, genreFieldType, "map", verify, Integer.MAX_VALUE, 1); + testCase(indexReader, genreFieldType, "global_ordinals", verify, Integer.MAX_VALUE, 1); + testCase(indexReader, genreFieldType, "bytes_hash", verify, Integer.MAX_VALUE, 1); // huge maxDocsPerValue - testCase(indexSearcher, genreFieldType, "map", verify, 100, Integer.MAX_VALUE); - testCase(indexSearcher, genreFieldType, "global_ordinals", verify, 100, Integer.MAX_VALUE); - testCase(indexSearcher, genreFieldType, "bytes_hash", verify, 100, Integer.MAX_VALUE); + testCase(indexReader, genreFieldType, "map", verify, 100, Integer.MAX_VALUE); + testCase(indexReader, genreFieldType, "global_ordinals", verify, 100, Integer.MAX_VALUE); + testCase(indexReader, genreFieldType, "bytes_hash", verify, 100, 
Integer.MAX_VALUE); } finally { indexReader.close(); directory.close(); } } - private void testCase( - IndexSearcher indexSearcher, - MappedFieldType genreFieldType, - String executionHint, - Consumer verify - ) throws IOException { - testCase(indexSearcher, genreFieldType, executionHint, verify, 100, 1); + private void testCase(IndexReader indexReader, MappedFieldType genreFieldType, String executionHint, Consumer verify) + throws IOException { + testCase(indexReader, genreFieldType, executionHint, verify, 100, 1); } private void testCase( - IndexSearcher indexSearcher, + IndexReader indexReader, MappedFieldType genreFieldType, String executionHint, Consumer verify, @@ -183,7 +176,7 @@ private void testCase( .shardSize(shardSize) .subAggregation(new TermsAggregationBuilder("terms").field("id")); - InternalSampler result = searchAndReduce(indexSearcher, new AggTestConfig(builder, genreFieldType, idFieldType).withQuery(query)); + InternalSampler result = searchAndReduce(indexReader, new AggTestConfig(builder, genreFieldType, idFieldType).withQuery(query)); verify.accept(result); } @@ -192,7 +185,6 @@ public void testDiversifiedSampler_noDocs() throws Exception { RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); indexWriter.close(); IndexReader indexReader = DirectoryReader.open(directory); - IndexSearcher indexSearcher = newSearcher(indexReader); MappedFieldType idFieldType = new KeywordFieldMapper.KeywordFieldType("id"); @@ -201,7 +193,7 @@ public void testDiversifiedSampler_noDocs() throws Exception { DiversifiedAggregationBuilder builder = new DiversifiedAggregationBuilder("_name").field(genreFieldType.name()) .subAggregation(new TermsAggregationBuilder("terms").field("id")); - InternalSampler result = searchAndReduce(indexSearcher, new AggTestConfig(builder, genreFieldType, idFieldType)); + InternalSampler result = searchAndReduce(indexReader, new AggTestConfig(builder, genreFieldType, idFieldType)); Terms terms = result.getAggregations().get("terms"); assertEquals(0, terms.getBuckets().size()); indexReader.close(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorTests.java index e82c46e99dd39..220c863def228 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorTests.java @@ -16,7 +16,6 @@ import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.Term; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.TermQuery; import org.apache.lucene.store.Directory; import org.elasticsearch.index.mapper.MappedFieldType; @@ -63,9 +62,8 @@ public void testSampler() throws IOException { .subAggregation(new MinAggregationBuilder("min").field("int")); try (DirectoryReader reader = DirectoryReader.open(w)) { assertEquals("test expects a single segment", 1, reader.leaves().size()); - IndexSearcher searcher = newIndexSearcher(reader); InternalSampler sampler = searchAndReduce( - searcher, + reader, new AggTestConfig(aggBuilder, textFieldType, numericFieldType).withQuery(new TermQuery(new Term("text", "good"))) ); Min min = sampler.getAggregations().get("min"); @@ -99,9 +97,8 @@ public void testRidiculousSize() throws IOException { .subAggregation(new 
MinAggregationBuilder("min").field("int")); try (DirectoryReader reader = DirectoryReader.open(w)) { assertEquals("test expects a single segment", 1, reader.leaves().size()); - IndexSearcher searcher = newIndexSearcher(reader); InternalSampler sampler = searchAndReduce( - searcher, + reader, new AggTestConfig(aggBuilder, textFieldType, numericFieldType).withQuery(new TermQuery(new Term("text", "good"))) ); Min min = sampler.getAggregations().get("min"); @@ -121,15 +118,13 @@ public void testEmptyParentBucket() throws Exception { writer.addDocument(new Document()); try (IndexReader reader = DirectoryReader.open(writer)) { - IndexSearcher searcher = newSearcher(reader); - QueryBuilder[] filters = new QueryBuilder[] { new MatchAllQueryBuilder(), new MatchNoneQueryBuilder() }; FiltersAggregationBuilder samplerParent = new FiltersAggregationBuilder("filters", filters); TermsAggregationBuilder samplerChild = new TermsAggregationBuilder("child").field("field"); SamplerAggregationBuilder sampler = new SamplerAggregationBuilder("sampler").subAggregation(samplerChild); samplerParent.subAggregation(sampler); - InternalFilters response = searchAndReduce(searcher, new AggTestConfig(samplerParent)); + InternalFilters response = searchAndReduce(reader, new AggTestConfig(samplerParent)); assertEquals(response.getBuckets().size(), 2); assertEquals(response.getBuckets().get(0).getDocCount(), 1); assertEquals(response.getBuckets().get(1).getDocCount(), 0); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/BinaryTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/BinaryTermsAggregatorTests.java index e951900e4609e..368a0197ab137 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/BinaryTermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/BinaryTermsAggregatorTests.java @@ -9,7 +9,6 @@ import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; @@ -143,8 +142,6 @@ private void testSearchCase( } try (DirectoryReader indexReader = DirectoryReader.open(directory)) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); - TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name"); if (valueType != null) { aggregationBuilder.userValueTypeHint(valueType); @@ -156,7 +153,7 @@ private void testSearchCase( MappedFieldType binaryFieldType = new BinaryFieldMapper.BinaryFieldType(BINARY_FIELD); InternalMappedTerms rareTerms = searchAndReduce( - indexSearcher, + indexReader, new AggTestConfig(aggregationBuilder, binaryFieldType).withQuery(query) ); verify.accept(rareTerms); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/KeywordTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/KeywordTermsAggregatorTests.java index 5d7084d30d788..8afea0d7fe214 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/KeywordTermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/KeywordTermsAggregatorTests.java @@ -12,7 +12,6 @@ import org.apache.lucene.document.FieldType; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexOptions; 
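// Editor's note: import removals like the one below are the mechanical half of the same
// refactor; once searchAndReduce(...) accepts the reader directly, IndexSearcher no
// longer appears in most tests. The exception, visible earlier in this patch in
// FiltersAggregatorTests, is tests that still inspect the searcher themselves: those now
// call Lucene's newSearcher(reader) and pass the reader on to the helper, e.g. (a sketch
// of the pattern shown above; `verifier` stands in for the result-checking lambda):
//     IndexSearcher searcher = newSearcher(limitedReader); // kept only to count live-doc segments
//     debugTestCase(builder, new MatchAllDocsQuery(), limitedReader, verifier, null, ft);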
-import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; @@ -119,8 +118,6 @@ private void testSearchCase( } try (DirectoryReader indexReader = DirectoryReader.open(directory)) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); - TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name"); if (valueType != null) { aggregationBuilder.userValueTypeHint(valueType); @@ -130,7 +127,7 @@ private void testSearchCase( } InternalMappedTerms rareTerms = searchAndReduce( - indexSearcher, + indexReader, new AggTestConfig(aggregationBuilder, keywordFieldType).withQuery(query) ); verify.accept(rareTerms); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregatorTests.java index 7f0a5199f7e19..a48c764a94eb1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregatorTests.java @@ -11,7 +11,6 @@ import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; @@ -153,8 +152,6 @@ private void testSearchCase( } try (DirectoryReader indexReader = DirectoryReader.open(directory)) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); - TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name"); if (valueType != null) { aggregationBuilder.userValueTypeHint(valueType); @@ -166,7 +163,7 @@ private void testSearchCase( MappedFieldType longFieldType = new NumberFieldMapper.NumberFieldType(LONG_FIELD, NumberFieldMapper.NumberType.LONG); InternalMappedTerms rareTerms = searchAndReduce( - indexSearcher, + indexReader, new AggTestConfig(aggregationBuilder, longFieldType).withQuery(query) ); verify.accept(rareTerms); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java index 088133f0e2de8..ad7a6c47ef5e4 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java @@ -18,7 +18,6 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.FieldExistsQuery; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; @@ -241,11 +240,10 @@ public void testUnmapped() throws Exception { MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("another_long", NumberFieldMapper.NumberType.LONG); try (DirectoryReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); String[] fieldNames = new String[] { "string", "long" }; for (int i = 0; i < 
fieldNames.length; i++) { RareTermsAggregationBuilder aggregationBuilder = new RareTermsAggregationBuilder("_name").field(fieldNames[i]); - RareTerms result = searchAndReduce(indexSearcher, new AggTestConfig(aggregationBuilder, fieldType1, fieldType2)); + RareTerms result = searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder, fieldType1, fieldType2)); assertEquals("_name", result.getName()); assertEquals(0, result.getBuckets().size()); } @@ -272,11 +270,10 @@ public void testRangeField() throws Exception { MappedFieldType fieldType = new RangeFieldMapper.RangeFieldType("field", rangeType); try (DirectoryReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); RareTermsAggregationBuilder aggregationBuilder = new RareTermsAggregationBuilder("_name").field("field"); expectThrows( IllegalArgumentException.class, - () -> searchAndReduce(indexSearcher, new AggTestConfig(aggregationBuilder, fieldType)) + () -> searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder, fieldType)) ); } } @@ -350,7 +347,6 @@ public void testGlobalAggregationWithScore() throws IOException { document.add(new SortedDocValuesField("keyword", new BytesRef("e"))); indexWriter.addDocument(document); try (DirectoryReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); GlobalAggregationBuilder globalBuilder = new GlobalAggregationBuilder("global").subAggregation( new RareTermsAggregationBuilder("terms").field("keyword") .subAggregation( @@ -361,7 +357,7 @@ public void testGlobalAggregationWithScore() throws IOException { MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("keyword"); - InternalGlobal result = searchAndReduce(indexSearcher, new AggTestConfig(globalBuilder, fieldType)); + InternalGlobal result = searchAndReduce(indexReader, new AggTestConfig(globalBuilder, fieldType)); InternalMultiBucketAggregation terms = result.getAggregations().get("terms"); assertThat(terms.getBuckets().size(), equalTo(3)); for (MultiBucketsAggregation.Bucket bucket : terms.getBuckets()) { @@ -398,7 +394,7 @@ public void testWithNestedAggregations() throws IOException { try (DirectoryReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { AggTestConfig aggTestConfig = new AggTestConfig(nested, fieldType).withQuery(new FieldExistsQuery(PRIMARY_TERM_NAME)); // match root document only - InternalNested result = searchAndReduce(newIndexSearcher(indexReader), aggTestConfig); + InternalNested result = searchAndReduce(indexReader, aggTestConfig); InternalMultiBucketAggregation terms = result.getAggregations().get("terms"); assertThat(terms.getBuckets().size(), equalTo(1)); assertThat(terms.getBuckets().get(0).getKeyAsString(), equalTo("8")); @@ -439,7 +435,7 @@ public void testWithNestedScoringAggregations() throws IOException { AggTestConfig aggTestConfig = new AggTestConfig(nested, fieldType).withQuery( new FieldExistsQuery(PRIMARY_TERM_NAME) ); - searchAndReduce(newIndexSearcher(indexReader), aggTestConfig); + searchAndReduce(indexReader, aggTestConfig); }); assertThat( e.getMessage(), @@ -454,7 +450,7 @@ public void testWithNestedScoringAggregations() throws IOException { new FieldExistsQuery(PRIMARY_TERM_NAME) ); // match root document only - InternalNested result = searchAndReduce(newIndexSearcher(indexReader), aggTestConfig); + InternalNested result = searchAndReduce(indexReader, aggTestConfig); InternalMultiBucketAggregation 
terms = result.getAggregations().get("terms"); assertThat(terms.getBuckets().size(), equalTo(2)); long counter = 1; @@ -544,13 +540,11 @@ private A executeTestCase(Query query, List searchAndReduce(indexSearcher, new AggTestConfig(sigAgg, fieldType))); + expectThrows(IllegalArgumentException.class, () -> searchAndReduce(reader, new AggTestConfig(sigAgg, fieldType))); } } } @@ -409,14 +391,12 @@ public void testFieldAlias() throws IOException { try (DirectoryReader reader = DirectoryReader.open(w)) { assertEquals("test expects a single segment", 1, reader.leaves().size()); - IndexSearcher searcher = newSearcher(reader, false); - SignificantTerms evenTerms = searchAndReduce( - searcher, + reader, new AggTestConfig(agg, textFieldType).withQuery(new TermQuery(new Term("text", "even"))) ); SignificantTerms aliasEvenTerms = searchAndReduce( - searcher, + reader, new AggTestConfig(aliasAgg, textFieldType).withQuery(new TermQuery(new Term("text", "even"))) ); @@ -424,11 +404,11 @@ public void testFieldAlias() throws IOException { assertEquals(evenTerms, aliasEvenTerms); SignificantTerms oddTerms = searchAndReduce( - searcher, + reader, new AggTestConfig(agg, textFieldType).withQuery(new TermQuery(new Term("text", "odd"))) ); SignificantTerms aliasOddTerms = searchAndReduce( - searcher, + reader, new AggTestConfig(aliasAgg, textFieldType).withQuery(new TermQuery(new Term("text", "odd"))) ); @@ -461,14 +441,12 @@ public void testFieldBackground() throws IOException { try (DirectoryReader reader = DirectoryReader.open(w)) { assertEquals("test expects a single segment", 1, reader.leaves().size()); - IndexSearcher searcher = newSearcher(reader, false); - SignificantTerms evenTerms = searchAndReduce( - searcher, + reader, new AggTestConfig(agg, textFieldType).withQuery(new TermQuery(new Term("text", "even"))) ); SignificantTerms backgroundEvenTerms = searchAndReduce( - searcher, + reader, new AggTestConfig(backgroundAgg, textFieldType).withQuery(new TermQuery(new Term("text", "even"))) ); @@ -501,10 +479,9 @@ private void testAllDocsWithoutStringField(String executionHint) throws IOExcept try (RandomIndexWriter writer = new RandomIndexWriter(random(), dir)) { writer.addDocument(new Document()); try (DirectoryReader reader = maybeWrapReaderEs(writer.getReader())) { - IndexSearcher searcher = newIndexSearcher(reader); SignificantTermsAggregationBuilder request = new SignificantTermsAggregationBuilder("f").field("f") .executionHint(executionHint); - SignificantStringTerms result = searchAndReduce(searcher, new AggTestConfig(request, keywordField("f"))); + SignificantStringTerms result = searchAndReduce(reader, new AggTestConfig(request, keywordField("f"))); assertThat(result.getSubsetSize(), equalTo(1L)); } } @@ -522,9 +499,8 @@ public void testAllDocsWithoutNumericField() throws IOException { try (RandomIndexWriter writer = new RandomIndexWriter(random(), dir)) { writer.addDocument(new Document()); try (DirectoryReader reader = maybeWrapReaderEs(writer.getReader())) { - IndexSearcher searcher = newIndexSearcher(reader); SignificantTermsAggregationBuilder request = new SignificantTermsAggregationBuilder("f").field("f"); - SignificantLongTerms result = searchAndReduce(searcher, new AggTestConfig(request, longField("f"))); + SignificantLongTerms result = searchAndReduce(reader, new AggTestConfig(request, longField("f"))); assertThat(result.getSubsetSize(), equalTo(1L)); } } @@ -553,10 +529,9 @@ private void testSomeDocsWithoutStringField(String executionHint) throws IOExcep writer.flush(); 
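The change repeated across these test hunks is uniform: searchAndReduce now takes the DirectoryReader itself and builds the searcher internally, so the per-test newIndexSearcher/newSearcher boilerplate goes away. A minimal before/after sketch of the pattern, using only helper names already visible in these hunks (AggTestConfig, keywordField); the aggregation and field name are illustrative:

    // Before: each test wrapped the reader in its own searcher.
    IndexSearcher searcher = newIndexSearcher(reader);
    SignificantStringTerms result = searchAndReduce(searcher, new AggTestConfig(request, keywordField("f")));

    // After: the reader is handed over directly; the test harness creates the searcher.
    SignificantStringTerms result = searchAndReduce(reader, new AggTestConfig(request, keywordField("f")));
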
writer.addDocument(new Document()); try (DirectoryReader reader = maybeWrapReaderEs(writer.getReader())) { - IndexSearcher searcher = newIndexSearcher(reader); SignificantTermsAggregationBuilder request = new SignificantTermsAggregationBuilder("f").field("f") .executionHint(executionHint); - SignificantStringTerms result = searchAndReduce(searcher, new AggTestConfig(request, keywordField("f"))); + SignificantStringTerms result = searchAndReduce(reader, new AggTestConfig(request, keywordField("f"))); assertThat(result.getSubsetSize(), equalTo(2L)); } } @@ -576,9 +551,8 @@ public void testSomeDocsWithoutNumericField() throws IOException { writer.addDocument(d); writer.addDocument(new Document()); try (DirectoryReader reader = maybeWrapReaderEs(writer.getReader())) { - IndexSearcher searcher = newIndexSearcher(reader); SignificantTermsAggregationBuilder request = new SignificantTermsAggregationBuilder("f").field("f"); - SignificantLongTerms result = searchAndReduce(searcher, new AggTestConfig(request, longField("f"))); + SignificantLongTerms result = searchAndReduce(reader, new AggTestConfig(request, longField("f"))); assertThat(result.getSubsetSize(), equalTo(2L)); } } @@ -608,7 +582,6 @@ private void threeLayerStringTestCase(String executionHint) throws IOException { } } try (DirectoryReader reader = maybeWrapReaderEs(writer.getReader())) { - IndexSearcher searcher = newIndexSearcher(reader); SignificantTermsAggregationBuilder kRequest = new SignificantTermsAggregationBuilder("k").field("k") .minDocCount(0) .executionHint(executionHint); @@ -621,7 +594,7 @@ private void threeLayerStringTestCase(String executionHint) throws IOException { .executionHint(executionHint) .subAggregation(jRequest); SignificantStringTerms result = searchAndReduce( - searcher, + reader, new AggTestConfig(request, keywordField("i"), keywordField("j"), keywordField("k")) ); assertThat(result.getSubsetSize(), equalTo(1000L)); @@ -662,7 +635,6 @@ public void testThreeLayerLong() throws IOException { } } try (DirectoryReader reader = maybeWrapReaderEs(writer.getReader())) { - IndexSearcher searcher = newIndexSearcher(reader); SignificantTermsAggregationBuilder request = new SignificantTermsAggregationBuilder("i").field("i") .minDocCount(0) .subAggregation( @@ -671,7 +643,7 @@ public void testThreeLayerLong() throws IOException { .subAggregation(new SignificantTermsAggregationBuilder("k").field("k").minDocCount(0)) ); SignificantLongTerms result = searchAndReduce( - searcher, + reader, new AggTestConfig(request, longField("i"), longField("j"), longField("k")) ); assertThat(result.getSubsetSize(), equalTo(1000L)); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTextAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTextAggregatorTests.java index b6f37b8932bdd..5f87e5b7b76d8 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTextAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTextAggregatorTests.java @@ -17,7 +17,6 @@ import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.Term; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.TermQuery; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; @@ -86,11 +85,9 @@ public void testSignificance() throws IOException { try (DirectoryReader reader = 
DirectoryReader.open(w)) { assertEquals("test expects a single segment", 1, reader.leaves().size()); - IndexSearcher searcher = newIndexSearcher(reader); - // Search "odd" which should have no duplication InternalSampler sampler = searchAndReduce( - searcher, + reader, new AggTestConfig(aggBuilder, textFieldType).withQuery(new TermQuery(new Term("text", "odd"))) ); SignificantTerms terms = sampler.getAggregations().get("sig_text"); @@ -102,7 +99,7 @@ public void testSignificance() throws IOException { // Search "even" which will have duplication sampler = searchAndReduce( - searcher, + reader, new AggTestConfig(aggBuilder, textFieldType).withQuery(new TermQuery(new Term("text", "even"))) ); terms = sampler.getAggregations().get("sig_text"); @@ -137,8 +134,6 @@ public void testIncludeExcludes() throws IOException { try (DirectoryReader reader = DirectoryReader.open(w)) { assertEquals("test expects a single segment", 1, reader.leaves().size()); - IndexSearcher searcher = newIndexSearcher(reader); - // Inclusive of values { SignificantTextAggregationBuilder sigAgg = new SignificantTextAggregationBuilder("sig_text", "text").includeExclude( @@ -150,7 +145,7 @@ public void testIncludeExcludes() throws IOException { } // Search "even" which should have duplication InternalSampler sampler = searchAndReduce( - searcher, + reader, new AggTestConfig(aggBuilder, textFieldType).withQuery(new TermQuery(new Term("text", "even"))) ); SignificantTerms terms = sampler.getAggregations().get("sig_text"); @@ -171,7 +166,7 @@ public void testIncludeExcludes() throws IOException { } // Search "even" which should have duplication InternalSampler sampler = searchAndReduce( - searcher, + reader, new AggTestConfig(aggBuilder, textFieldType).withQuery(new TermQuery(new Term("text", "even"))) ); SignificantTerms terms = sampler.getAggregations().get("sig_text"); @@ -202,9 +197,8 @@ public void testMissingField() throws IOException { SamplerAggregationBuilder aggBuilder = new SamplerAggregationBuilder("sampler").subAggregation(sigAgg); try (IndexReader reader = DirectoryReader.open(w)) { - IndexSearcher searcher = newSearcher(reader); InternalSampler sampler = searchAndReduce( - searcher, + reader, new AggTestConfig(aggBuilder, textFieldType).withQuery(new TermQuery(new Term("text", "odd"))) ); SignificantTerms terms = sampler.getAggregations().get("sig_text"); @@ -233,17 +227,16 @@ public void testFieldAlias() throws IOException { try (DirectoryReader reader = DirectoryReader.open(w)) { assertEquals("test expects a single segment", 1, reader.leaves().size()); - IndexSearcher searcher = newIndexSearcher(reader); SamplerAggregationBuilder samplerAgg = sampler("sampler").subAggregation(agg); SamplerAggregationBuilder aliasSamplerAgg = sampler("sampler").subAggregation(aliasAgg); InternalSampler sampler = searchAndReduce( - searcher, + reader, new AggTestConfig(samplerAgg, textFieldType).withQuery(new TermQuery(new Term("text", "odd"))) ); InternalSampler aliasSampler = searchAndReduce( - searcher, + reader, new AggTestConfig(aliasSamplerAgg, textFieldType).withQuery(new TermQuery(new Term("text", "odd"))) ); @@ -253,11 +246,11 @@ public void testFieldAlias() throws IOException { assertEquals(terms, aliasTerms); sampler = searchAndReduce( - searcher, + reader, new AggTestConfig(samplerAgg, textFieldType).withQuery(new TermQuery(new Term("text", "even"))) ); aliasSampler = searchAndReduce( - searcher, + reader, new AggTestConfig(aliasSamplerAgg, textFieldType).withQuery(new TermQuery(new Term("text", "even"))) ); @@ -286,9 
+279,8 @@ public void testInsideTermsAgg() throws IOException { try (IndexReader reader = DirectoryReader.open(w)) { assertEquals("test expects a single segment", 1, reader.leaves().size()); - IndexSearcher searcher = newSearcher(reader); - StringTerms terms = searchAndReduce(searcher, new AggTestConfig(aggBuilder, textFieldType, keywordField("kwd"))); + StringTerms terms = searchAndReduce(reader, new AggTestConfig(aggBuilder, textFieldType, keywordField("kwd"))); SignificantTerms sigOdd = terms.getBucketByKey("odd").getAggregations().get("sig_text"); assertNull(sigOdd.getBucketByKey("even")); assertNull(sigOdd.getBucketByKey("duplicate")); @@ -348,8 +340,7 @@ public void testSignificanceOnTextArrays() throws IOException { sigAgg.sourceFieldNames(Arrays.asList(new String[] { "title", "text" })); try (IndexReader reader = DirectoryReader.open(w)) { assertEquals("test expects a single segment", 1, reader.leaves().size()); - IndexSearcher searcher = newSearcher(reader); - searchAndReduce(searcher, new AggTestConfig(sigAgg, textFieldType).withQuery(new TermQuery(new Term("text", "foo")))); + searchAndReduce(reader, new AggTestConfig(sigAgg, textFieldType).withQuery(new TermQuery(new Term("text", "foo")))); // No significant results to be found in this test - only checking we don't end up // with the internal exception discovered in issue https://github.com/elastic/elasticsearch/issues/25029 } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java index d7bc8a059b9cb..bc607b25b7c5c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java @@ -27,7 +27,6 @@ import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.FieldExistsQuery; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; @@ -219,10 +218,9 @@ public void testUsesGlobalOrdinalsByDefault() throws Exception { indexWriter.close(); DirectoryReader indexReader = DirectoryReader.open(directory); // We do not use LuceneTestCase.newSearcher because we need a DirectoryReader - IndexSearcher indexSearcher = newSearcher(indexReader); MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("string"); - try (AggregationContext context = createAggregationContext(indexSearcher, new MatchAllDocsQuery(), fieldType)) { + try (AggregationContext context = createAggregationContext(indexReader, new MatchAllDocsQuery(), fieldType)) { TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name").userValueTypeHint(ValueType.STRING) .field("string"); @@ -409,14 +407,14 @@ public void testDelaysSubAggs() throws Exception { } } } - }, searcher -> { + }, reader -> { /* * Use 200 max buckets rather than the default so we bump into it * really fast if we eagerly create the child buckets. It also * lets us create a fairly small test index. 
*/ int maxBuckets = 200; - StringTerms result = searchAndReduce(searcher, new AggTestConfig(aggregationBuilder, s1ft, s2ft).withMaxBuckets(maxBuckets)); + StringTerms result = searchAndReduce(reader, new AggTestConfig(aggregationBuilder, s1ft, s2ft).withMaxBuckets(maxBuckets)); assertThat( result.getBuckets().stream().map(StringTerms.Bucket::getKey).collect(toList()), equalTo(List.of("b007", "b107", "b207", "b307", "b407", "b507", "b607", "b707", "b807", "b907", "b000")) @@ -700,7 +698,6 @@ public void testNumericIncludeExclude() throws Exception { document.add(new NumericDocValuesField("double_field", Double.doubleToRawLongBits(5.0))); indexWriter.addDocument(document); try (DirectoryReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("long_field", NumberFieldMapper.NumberType.LONG); String executionHint = randomFrom(TermsAggregatorFactory.ExecutionMode.values()).toString(); @@ -709,7 +706,7 @@ public void testNumericIncludeExclude() throws Exception { .includeExclude(new IncludeExclude(null, null, new TreeSet<>(Set.of(new BytesRef("0"), new BytesRef("5"))), null)) .field("long_field") .order(BucketOrder.key(true)); - Terms result = searchAndReduce(indexSearcher, new AggTestConfig(aggregationBuilder, fieldType)); + Terms result = searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder, fieldType)); assertEquals(2, result.getBuckets().size()); assertEquals(0L, result.getBuckets().get(0).getKey()); assertEquals(1L, result.getBuckets().get(0).getDocCount()); @@ -722,7 +719,7 @@ public void testNumericIncludeExclude() throws Exception { .includeExclude(new IncludeExclude(null, null, null, new TreeSet<>(Set.of(new BytesRef("0"), new BytesRef("5"))))) .field("long_field") .order(BucketOrder.key(true)); - result = searchAndReduce(indexSearcher, new AggTestConfig(aggregationBuilder, fieldType)); + result = searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder, fieldType)); assertEquals(4, result.getBuckets().size()); assertEquals(1L, result.getBuckets().get(0).getKey()); assertEquals(1L, result.getBuckets().get(0).getDocCount()); @@ -742,7 +739,7 @@ public void testNumericIncludeExclude() throws Exception { ) .field("double_field") .order(BucketOrder.key(true)); - result = searchAndReduce(indexSearcher, new AggTestConfig(aggregationBuilder, fieldType)); + result = searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder, fieldType)); assertEquals(2, result.getBuckets().size()); assertEquals(0.0, result.getBuckets().get(0).getKey()); assertEquals(1L, result.getBuckets().get(0).getDocCount()); @@ -757,7 +754,7 @@ public void testNumericIncludeExclude() throws Exception { ) .field("double_field") .order(BucketOrder.key(true)); - result = searchAndReduce(indexSearcher, new AggTestConfig(aggregationBuilder, fieldType)); + result = searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder, fieldType)); assertEquals(4, result.getBuckets().size()); assertEquals(1.0, result.getBuckets().get(0).getKey()); @@ -920,7 +917,6 @@ private void termsAggregator( String executionHint = randomFrom(TermsAggregatorFactory.ExecutionMode.values()).toString(); logger.info("bucket_order={} size={} execution_hint={}", bucketOrder, size, executionHint); - IndexSearcher indexSearcher = newIndexSearcher(indexReader); AggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name").userValueTypeHint(valueType) 
.executionHint(executionHint) .size(size) @@ -929,7 +925,7 @@ private void termsAggregator( .order(bucketOrder); Terms result = searchAndReduce( - indexSearcher, + indexReader, new AggTestConfig(aggregationBuilder, fieldType).withSplitLeavesIntoSeperateAggregators(false) ); assertEquals(size, result.getBuckets().size()); @@ -955,7 +951,7 @@ private void termsAggregator( .field("field") ); result = ((Filter) searchAndReduce( - indexSearcher, + indexReader, new AggTestConfig(aggregationBuilder, fieldType, filterFieldType).withSplitLeavesIntoSeperateAggregators(false) )).getAggregations().get("_name2"); int expectedFilteredCounts = 0; @@ -1021,7 +1017,6 @@ private void termsAggregatorWithNestedMaxAgg( executionHint, collectionMode ); - IndexSearcher indexSearcher = newIndexSearcher(indexReader); AggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name").userValueTypeHint(valueType) .executionHint(executionHint) .collectMode(collectionMode) @@ -1032,7 +1027,7 @@ private void termsAggregatorWithNestedMaxAgg( .subAggregation(AggregationBuilders.max("_max").field("value")); MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.LONG); - Terms result = searchAndReduce(indexSearcher, new AggTestConfig(aggregationBuilder, fieldType, fieldType2)); + Terms result = searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder, fieldType, fieldType2)); assertEquals(size, result.getBuckets().size()); for (int i = 0; i < size; i++) { Map.Entry expected = expectedBuckets.get(i); @@ -1051,20 +1046,19 @@ public void testEmpty() throws Exception { MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("long", NumberFieldMapper.NumberType.LONG); MappedFieldType fieldType3 = new NumberFieldMapper.NumberFieldType("double", NumberFieldMapper.NumberType.DOUBLE); try (DirectoryReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name").userValueTypeHint(ValueType.STRING) .field("string"); - Terms result = searchAndReduce(indexSearcher, new AggTestConfig(aggregationBuilder, fieldType1)); + Terms result = searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder, fieldType1)); assertEquals("_name", result.getName()); assertEquals(0, result.getBuckets().size()); aggregationBuilder = new TermsAggregationBuilder("_name").userValueTypeHint(ValueType.LONG).field("long"); - result = searchAndReduce(indexSearcher, new AggTestConfig(aggregationBuilder, fieldType2)); + result = searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder, fieldType2)); assertEquals("_name", result.getName()); assertEquals(0, result.getBuckets().size()); aggregationBuilder = new TermsAggregationBuilder("_name").userValueTypeHint(ValueType.DOUBLE).field("double"); - result = searchAndReduce(indexSearcher, new AggTestConfig(aggregationBuilder, fieldType3)); + result = searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder, fieldType3)); assertEquals("_name", result.getName()); assertEquals(0, result.getBuckets().size()); } @@ -1076,13 +1070,12 @@ public void testUnmapped() throws Exception { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { try (DirectoryReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); ValueType[] valueTypes = new 
ValueType[] { ValueType.STRING, ValueType.LONG, ValueType.DOUBLE }; String[] fieldNames = new String[] { "string", "long", "double" }; for (int i = 0; i < fieldNames.length; i++) { TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name").userValueTypeHint(valueTypes[i]) .field(fieldNames[i]); - Terms result = searchAndReduce(indexSearcher, new AggTestConfig(aggregationBuilder)); + Terms result = searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder)); assertEquals("_name", result.getName()); assertEquals(0, result.getBuckets().size()); assertFalse(AggregationInspectionHelper.hasValue((InternalTerms) result)); @@ -1104,7 +1097,6 @@ public void testUnmappedWithMissing() throws Exception { MappedFieldType fieldType1 = new KeywordFieldMapper.KeywordFieldType("unrelated_value"); - IndexSearcher indexSearcher = newIndexSearcher(indexReader); ValueType[] valueTypes = new ValueType[] { ValueType.STRING, ValueType.LONG, ValueType.DOUBLE }; String[] fieldNames = new String[] { "string", "long", "double" }; Object[] missingValues = new Object[] { "abc", 19L, 19.2 }; @@ -1113,7 +1105,7 @@ public void testUnmappedWithMissing() throws Exception { TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name").userValueTypeHint(valueTypes[i]) .field(fieldNames[i]) .missing(missingValues[i]); - Terms result = searchAndReduce(indexSearcher, new AggTestConfig(aggregationBuilder, fieldType1)); + Terms result = searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder, fieldType1)); assertEquals("_name", result.getName()); assertEquals(1, result.getBuckets().size()); assertEquals(missingValues[i], result.getBuckets().get(0).getKey()); @@ -1138,11 +1130,10 @@ public void testRangeField() throws Exception { indexWriter.addDocument(document); try (DirectoryReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { MappedFieldType fieldType = new RangeFieldMapper.RangeFieldType(fieldName, rangeType); - IndexSearcher indexSearcher = newIndexSearcher(indexReader); TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name").field(fieldName); expectThrows( IllegalArgumentException.class, - () -> { searchAndReduce(indexSearcher, new AggTestConfig(aggregationBuilder, fieldType)); } + () -> { searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder, fieldType)); } ); } } @@ -1159,11 +1150,10 @@ public void testGeoPointField() throws Exception { indexWriter.addDocument(document); try (DirectoryReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field"); - IndexSearcher indexSearcher = newIndexSearcher(indexReader); TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name").field(field); expectThrows( IllegalArgumentException.class, - () -> { searchAndReduce(indexSearcher, new AggTestConfig(aggregationBuilder, fieldType)); } + () -> { searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder, fieldType)); } ); } } @@ -1215,7 +1205,6 @@ public void testNestedTermsAgg() throws Exception { document.addAll(doc(fieldType2, "f")); indexWriter.addDocument(document); try (DirectoryReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); String executionHint = randomFrom(TermsAggregatorFactory.ExecutionMode.values()).toString(); Aggregator.SubAggCollectionMode collectionMode = randomFrom(Aggregator.SubAggCollectionMode.values()); 
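Taken together, the TermsAggregatorTests hunks all sit on the same scaffold; the sketch below shows its shape after the change, assuming the AggregatorTestCase helpers used throughout this diff (maybeWrapReaderEs, searchAndReduce, AggTestConfig). The single document and the "string" field are illustrative rather than taken from any one test:

    try (Directory directory = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
        Document document = new Document();
        document.add(new SortedDocValuesField("string", new BytesRef("a")));
        indexWriter.addDocument(document);
        try (DirectoryReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) {
            TermsAggregationBuilder builder = new TermsAggregationBuilder("_name").field("string");
            MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("string");
            // The reader, not a hand-built IndexSearcher, now drives search-and-reduce.
            Terms result = searchAndReduce(indexReader, new AggTestConfig(builder, fieldType));
            assertEquals(1, result.getBuckets().size());
        }
    }
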
TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name1").userValueTypeHint(ValueType.STRING) @@ -1230,7 +1219,7 @@ public void testNestedTermsAgg() throws Exception { .field("field2") .order(BucketOrder.key(true)) ); - Terms result = searchAndReduce(indexSearcher, new AggTestConfig(aggregationBuilder, fieldType1, fieldType2)); + Terms result = searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder, fieldType1, fieldType2)); assertEquals(3, result.getBuckets().size()); assertEquals("a", result.getBuckets().get(0).getKeyAsString()); assertEquals(1L, result.getBuckets().get(0).getDocCount()); @@ -1261,9 +1250,8 @@ public void testMixLongAndDouble() throws Exception { final Directory dir; try (IndexReader reader = createIndexWithLongs()) { dir = ((DirectoryReader) reader).directory(); - IndexSearcher searcher = newSearcher(reader); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG); - aggs.add(buildInternalAggregation(aggregationBuilder, fieldType, searcher)); + aggs.add(buildInternalAggregation(aggregationBuilder, fieldType, reader)); } dir.close(); } @@ -1272,9 +1260,8 @@ public void testMixLongAndDouble() throws Exception { final Directory dir; try (IndexReader reader = createIndexWithDoubles()) { dir = ((DirectoryReader) reader).directory(); - IndexSearcher searcher = newSearcher(reader); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.DOUBLE); - aggs.add(buildInternalAggregation(aggregationBuilder, fieldType, searcher)); + aggs.add(buildInternalAggregation(aggregationBuilder, fieldType, reader)); } dir.close(); } @@ -1317,7 +1304,6 @@ public void testGlobalAggregationWithScore() throws IOException { document.add(new SortedDocValuesField("keyword", new BytesRef("e"))); indexWriter.addDocument(document); try (DirectoryReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); String executionHint = randomFrom(TermsAggregatorFactory.ExecutionMode.values()).toString(); Aggregator.SubAggCollectionMode collectionMode = randomFrom(Aggregator.SubAggCollectionMode.values()); GlobalAggregationBuilder globalBuilder = new GlobalAggregationBuilder("global").subAggregation( @@ -1338,7 +1324,7 @@ public void testGlobalAggregationWithScore() throws IOException { MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("keyword"); - InternalGlobal result = searchAndReduce(indexSearcher, new AggTestConfig(globalBuilder, fieldType)); + InternalGlobal result = searchAndReduce(indexReader, new AggTestConfig(globalBuilder, fieldType)); InternalMultiBucketAggregation terms = result.getAggregations().get("terms"); assertThat(terms.getBuckets().size(), equalTo(3)); for (MultiBucketsAggregation.Bucket bucket : terms.getBuckets()) { @@ -1389,7 +1375,7 @@ public void testWithNestedAggregations() throws IOException { { // match root document only InternalNested result = searchAndReduce( - newIndexSearcher(indexReader), + indexReader, new AggTestConfig(nested, fieldType).withQuery(new FieldExistsQuery(PRIMARY_TERM_NAME)) ); InternalMultiBucketAggregation terms = result.getAggregations().get("terms"); @@ -1401,7 +1387,7 @@ public void testWithNestedAggregations() throws IOException { .subAggregation(nested); // match root document only InternalFilter result = searchAndReduce( - newIndexSearcher(indexReader), + indexReader, new AggTestConfig(filter, fieldType).withQuery(new 
FieldExistsQuery(PRIMARY_TERM_NAME)) ); InternalNested nestedResult = result.getAggregations().get("nested"); @@ -1436,7 +1422,7 @@ public void testHeisenpig() throws IOException { try (DirectoryReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { // match root document only StringTerms result = searchAndReduce( - newIndexSearcher(indexReader), + indexReader, new AggTestConfig(terms, animalFieldType, nestedFieldType).withQuery( Queries.newNonNestedFilter(IndexVersion.current()) ) @@ -1477,7 +1463,7 @@ public void testSortingWithNestedAggregations() throws IOException { try (DirectoryReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { // match root document only LongTerms result = searchAndReduce( - newIndexSearcher(indexReader), + indexReader, new AggTestConfig(terms, fieldType, nestedFieldType).withQuery(new FieldExistsQuery(PRIMARY_TERM_NAME)) ); assertThat(result.getBuckets().get(0).term, equalTo(3L)); @@ -1513,15 +1499,16 @@ public void testManySegmentsStillSingleton() throws IOException { ) ); }, - searcher -> debugTestCase( + reader -> debugTestCase( builder, new MatchAllDocsQuery(), - searcher, + reader, (LongTerms result, Class<? extends Aggregator> impl, Map<String, Map<String, Object>> debug) -> { Map<String, Object> subDebug = debug.get("n.str"); assertThat(subDebug, hasEntry("segments_with_single_valued_ords", 2)); assertThat(subDebug, hasEntry("segments_with_multi_valued_ords", 0)); }, + null, nFt, strFt ) @@ -1649,7 +1636,6 @@ private void threeLayerStringTestCase(String executionHint) throws IOException { } } try (DirectoryReader reader = maybeWrapReaderEs(writer.getReader())) { - IndexSearcher searcher = newIndexSearcher(reader); TermsAggregationBuilder request = new TermsAggregationBuilder("i").field("i") .executionHint(executionHint) .subAggregation( new TermsAggregationBuilder("j").field("j") .executionHint(executionHint) .subAggregation(new TermsAggregationBuilder("k").field("k").executionHint(executionHint)) ); - StringTerms result = searchAndReduce(searcher, new AggTestConfig(request, ift, jft, kft)); + StringTerms result = searchAndReduce(reader, new AggTestConfig(request, ift, jft, kft)); for (int i = 0; i < 10; i++) { StringTerms.Bucket iBucket = result.getBucketByKey(Integer.toString(i)); assertThat(iBucket.getDocCount(), equalTo(100L)); @@ -1692,15 +1678,11 @@ public void testThreeLayerLong() throws IOException { } } try (DirectoryReader reader = maybeWrapReaderEs(writer.getReader())) { - IndexSearcher searcher = newIndexSearcher(reader); TermsAggregationBuilder request = new TermsAggregationBuilder("i").field("i") .subAggregation( new TermsAggregationBuilder("j").field("j").subAggregation(new TermsAggregationBuilder("k").field("k")) ); - LongTerms result = searchAndReduce( - searcher, - new AggTestConfig(request, longField("i"), longField("j"), longField("k")) - ); + LongTerms result = searchAndReduce(reader, new AggTestConfig(request, longField("i"), longField("j"), longField("k"))); for (int i = 0; i < 10; i++) { LongTerms.Bucket iBucket = result.getBucketByKey(Integer.toString(i)); assertThat(iBucket.getDocCount(), equalTo(100L)); @@ -1740,8 +1722,6 @@ public void testOrderByPipelineAggregation() throws Exception { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { try (DirectoryReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); -
BucketScriptPipelineAggregationBuilder bucketScriptAgg = bucketScript("script", new Script("2.718")); TermsAggregationBuilder termsAgg = terms("terms").field("field") .userValueTypeHint(ValueType.STRING) @@ -1752,7 +1732,7 @@ public void testOrderByPipelineAggregation() throws Exception { AggregationExecutionException e = expectThrows( AggregationExecutionException.class, - () -> searchAndReduce(indexSearcher, new AggTestConfig(termsAgg, fieldType)) + () -> searchAndReduce(indexReader, new AggTestConfig(termsAgg, fieldType)) ); assertEquals( "Invalid aggregation order path [script]. The provided aggregation [script] " @@ -1840,14 +1820,12 @@ public void testOrderByCardinality() throws IOException { DirectoryReader unwrapped = DirectoryReader.open(directory); DirectoryReader indexReader = wrapDirectoryReader(unwrapped) ) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); - AggTestConfig aggTestConfig = new AggTestConfig( aggregationBuilder, new NumberFieldType("a", NumberType.INTEGER), bIsString ? new KeywordFieldType("b") : new NumberFieldType("b", NumberType.INTEGER) ).withSplitLeavesIntoSeperateAggregators(false).withMaxBuckets(Integer.MAX_VALUE); - LongTerms terms = searchAndReduce(indexSearcher, aggTestConfig); + LongTerms terms = searchAndReduce(indexReader, aggTestConfig); assertThat( terms.getBuckets().stream().map(MultiBucketsAggregation.Bucket::getKey).collect(toList()), equalTo(List.of(9L, 8L, 7L)) @@ -2258,7 +2236,7 @@ private IndexReader createIndexWithDoubles() throws IOException { return DirectoryReader.open(directory); } - private InternalAggregation buildInternalAggregation(TermsAggregationBuilder builder, MappedFieldType fieldType, IndexSearcher searcher) + private InternalAggregation buildInternalAggregation(TermsAggregationBuilder builder, MappedFieldType fieldType, IndexReader reader) throws IOException { /* TermsAggregator aggregator = createAggregator(builder, searcher, fieldType); @@ -2267,7 +2245,7 @@ private InternalAggregation buildInternalAggregation(TermsAggregationBuilder bui aggregator.postCollection(); return aggregator.buildTopLevel(); */ - return searchAndReduce(searcher, new AggTestConfig(builder, fieldType)); + return searchAndReduce(reader, new AggTestConfig(builder, fieldType)); } private T reduce(AggregationBuilder builder, Aggregator agg, BigArrays bigArrays) throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgAggregatorTests.java index c4e8b7dd6c73c..f48e16c660a1e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgAggregatorTests.java @@ -15,7 +15,6 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.FieldExistsQuery; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; @@ -500,9 +499,8 @@ public void testOrderByEmptyAggregation() throws IOException { indexWriter.close(); DirectoryReader indexReader = DirectoryReader.open(directory); - IndexSearcher indexSearcher = newIndexSearcher(indexReader); - Terms terms = searchAndReduce(indexSearcher, new AggTestConfig(aggregationBuilder, fieldType)); + Terms terms = searchAndReduce(indexReader, new 
AggTestConfig(aggregationBuilder, fieldType)); assertNotNull(terms); List buckets = terms.getBuckets(); assertNotNull(buckets); @@ -558,12 +556,11 @@ public void testCacheAggregation() throws IOException { indexWriter.close(); DirectoryReader indexReader = DirectoryReader.open(directory); - IndexSearcher indexSearcher = newIndexSearcher(indexReader); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("avg").field("value"); - InternalAvg avg = searchAndReduce(indexSearcher, new AggTestConfig(aggregationBuilder, fieldType)); + InternalAvg avg = searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder, fieldType)); assertEquals(5.5, avg.getValue(), 0); assertEquals("avg", avg.getName()); @@ -587,13 +584,12 @@ public void testScriptCaching() throws IOException { indexWriter.close(); DirectoryReader indexReader = DirectoryReader.open(directory); - IndexSearcher indexSearcher = newIndexSearcher(indexReader); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("avg").field("value") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, Collections.emptyMap())); - InternalAvg avg = searchAndReduce(indexSearcher, new AggTestConfig(aggregationBuilder, fieldType)); + InternalAvg avg = searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder, fieldType)); assertEquals(5.5, avg.getValue(), 0); assertEquals("avg", avg.getName()); @@ -602,7 +598,7 @@ public void testScriptCaching() throws IOException { aggregationBuilder = new AvgAggregationBuilder("avg").field("value") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, RANDOM_SCRIPT, Collections.emptyMap())); - avg = searchAndReduce(indexSearcher, new AggTestConfig(aggregationBuilder, fieldType).withShouldBeCached(false)); + avg = searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder, fieldType).withShouldBeCached(false)); assertTrue(avg.getValue() >= 0.0); assertTrue(avg.getValue() <= 1.0); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java index 94ee804d5e86b..5bb5eef5310cf 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java @@ -13,7 +13,6 @@ import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.geo.GeoEncodingUtils; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.ElasticsearchParseException; @@ -43,8 +42,7 @@ public void testEmpty() throws Exception { MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field"); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalGeoBounds bounds = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalGeoBounds bounds = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertTrue(Double.isInfinite(bounds.top)); assertTrue(Double.isInfinite(bounds.bottom)); 
assertTrue(Double.isInfinite(bounds.posLeft)); @@ -68,8 +66,7 @@ public void testUnmappedFieldWithDocs() throws Exception { MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field"); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalGeoBounds bounds = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalGeoBounds bounds = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertTrue(Double.isInfinite(bounds.top)); assertTrue(Double.isInfinite(bounds.bottom)); assertTrue(Double.isInfinite(bounds.posLeft)); @@ -100,8 +97,7 @@ public void testMissing() throws Exception { .wrapLongitude(false); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalGeoBounds bounds = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalGeoBounds bounds = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertThat(bounds.top, equalTo(lat)); assertThat(bounds.bottom, equalTo(lat)); assertThat(bounds.posLeft, equalTo(lon >= 0 ? lon : Double.POSITIVE_INFINITY)); @@ -125,9 +121,8 @@ public void testInvalidMissing() throws Exception { .missing("invalid") .wrapLongitude(false); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); ElasticsearchParseException exception = expectThrows(ElasticsearchParseException.class, () -> { - searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); }); assertThat(exception.getMessage(), startsWith("unsupported symbol")); } @@ -174,8 +169,7 @@ public void testRandom() throws Exception { MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field"); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalGeoBounds bounds = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalGeoBounds bounds = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertThat(bounds.top, closeTo(top, GEOHASH_TOLERANCE)); assertThat(bounds.bottom, closeTo(bottom, GEOHASH_TOLERANCE)); assertThat(bounds.posLeft, closeTo(posLeft, GEOHASH_TOLERANCE)); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorTests.java index 48b63eeaee39f..4ba38ca8681ac 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorTests.java @@ -11,7 +11,6 @@ import org.apache.lucene.document.LatLonDocValuesField; import org.apache.lucene.geo.GeoEncodingUtils; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.common.geo.GeoPoint; @@ -38,8 +37,7 @@ public void testEmpty() throws Exception { MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field"); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalGeoCentroid result = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalGeoCentroid result = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); 
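Error paths keep one shape as well: the expected exception is asserted around the entire searchAndReduce call. A condensed sketch mirroring the testInvalidMissing hunk above (the exception type and message vary by test):

    ElasticsearchParseException exception = expectThrows(
        ElasticsearchParseException.class,
        () -> searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType))
    );
    assertThat(exception.getMessage(), startsWith("unsupported symbol"));
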
assertNull(result.centroid()); assertFalse(AggregationInspectionHelper.hasValue(result)); } @@ -54,14 +52,12 @@ public void testUnmapped() throws Exception { document.add(new LatLonDocValuesField("field", 10, 10)); w.addDocument(document); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("another_field"); - InternalGeoCentroid result = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalGeoCentroid result = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertNull(result.centroid()); fieldType = new GeoPointFieldMapper.GeoPointFieldType("another_field"); - result = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + result = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertNull(result.centroid()); assertFalse(AggregationInspectionHelper.hasValue(result)); } @@ -81,10 +77,8 @@ public void testUnmappedWithMissing() throws Exception { document.add(new LatLonDocValuesField("field", 10, 10)); w.addDocument(document); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("another_field"); - InternalGeoCentroid result = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalGeoCentroid result = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertEquals(result.centroid(), expectedCentroid); assertTrue(AggregationInspectionHelper.hasValue(result)); } @@ -148,8 +142,7 @@ private void assertCentroid(RandomIndexWriter w, GeoPoint expectedCentroid) thro MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field"); GeoCentroidAggregationBuilder aggBuilder = new GeoCentroidAggregationBuilder("my_agg").field("field"); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalGeoCentroid result = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalGeoCentroid result = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertEquals("my_agg", result.getName()); SpatialPoint centroid = result.centroid(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java index 73882bfb9a92b..9d200d8c2cf5a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java @@ -12,7 +12,6 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.MultiReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.util.NumericUtils; @@ -47,8 +46,7 @@ public void testEmpty() throws IOException { .method(PercentilesMethod.HDR); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); try (IndexReader reader = new MultiReader()) { - IndexSearcher searcher = newSearcher(reader); - PercentileRanks ranks = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + PercentileRanks ranks = 
searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertFalse(ranks.iterator().hasNext()); assertFalse(AggregationInspectionHelper.hasValue((InternalHDRPercentileRanks) ranks)); } @@ -67,8 +65,7 @@ public void testSimple() throws IOException { .method(PercentilesMethod.HDR); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - PercentileRanks ranks = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + PercentileRanks ranks = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); Iterator rankIterator = ranks.iterator(); Percentile rank = rankIterator.next(); assertEquals(0.1, rank.getValue(), 0d); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java index 898279357bd61..50e653d7e5216 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java @@ -26,7 +26,6 @@ import org.apache.lucene.index.PointValues; import org.apache.lucene.index.Term; import org.apache.lucene.search.FieldExistsQuery; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; @@ -435,9 +434,8 @@ public void testSingleValuedFieldGetProperty() throws IOException { indexWriter.close(); DirectoryReader indexReader = DirectoryReader.open(directory); - IndexSearcher indexSearcher = newIndexSearcher(indexReader); - Global global = searchAndReduce(indexSearcher, new AggTestConfig(aggregationBuilder, fieldType)); + Global global = searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder, fieldType)); assertNotNull(global); assertEquals("global", global.getName()); assertEquals(10L, global.getDocCount()); @@ -647,9 +645,8 @@ public void testEmptyAggregation() throws Exception { indexWriter.close(); DirectoryReader indexReader = DirectoryReader.open(directory); - IndexSearcher indexSearcher = newIndexSearcher(indexReader); - Global global = searchAndReduce(indexSearcher, new AggTestConfig(aggregationBuilder, fieldType)); + Global global = searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder, fieldType)); assertNotNull(global); assertEquals("global", global.getName()); assertEquals(0L, global.getDocCount()); @@ -684,9 +681,8 @@ public void testOrderByEmptyAggregation() throws IOException { indexWriter.close(); DirectoryReader indexReader = DirectoryReader.open(directory); - IndexSearcher indexSearcher = newIndexSearcher(indexReader); - Terms terms = searchAndReduce(indexSearcher, new AggTestConfig(aggregationBuilder, fieldType)); + Terms terms = searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder, fieldType)); assertNotNull(terms); List buckets = terms.getBuckets(); assertNotNull(buckets); @@ -726,18 +722,17 @@ public void testEarlyTermination() throws Exception { indexWriter.close(); DirectoryReader indexReader = DirectoryReader.open(directory); - IndexSearcher indexSearcher = newIndexSearcher(indexReader); MaxAggregationBuilder maxAggregationBuilder = new MaxAggregationBuilder("max").field("values"); ValueCountAggregationBuilder countAggregationBuilder = new 
ValueCountAggregationBuilder("count").field("values"); - try (AggregationContext context = createAggregationContext(indexSearcher, new MatchAllDocsQuery(), fieldType)) { + try (AggregationContext context = createAggregationContext(indexReader, new MatchAllDocsQuery(), fieldType)) { MaxAggregator maxAggregator = createAggregator(maxAggregationBuilder, context); ValueCountAggregator countAggregator = createAggregator(countAggregationBuilder, context); BucketCollector bucketCollector = MultiBucketCollector.wrap(true, List.of(maxAggregator, countAggregator)); bucketCollector.preCollection(); - indexSearcher.search(new MatchAllDocsQuery(), bucketCollector.asCollector()); + context.searcher().search(new MatchAllDocsQuery(), bucketCollector.asCollector()); bucketCollector.postCollection(); Max max = (Max) maxAggregator.buildAggregation(0L); @@ -772,11 +767,10 @@ public void testNestedEarlyTermination() throws Exception { indexWriter.close(); DirectoryReader indexReader = DirectoryReader.open(directory); - IndexSearcher indexSearcher = newIndexSearcher(indexReader); try ( AggregationContext context = createAggregationContext( - indexSearcher, + indexReader, new MatchAllDocsQuery(), multiValuesfieldType, singleValueFieldType @@ -796,7 +790,7 @@ public void testNestedEarlyTermination() throws Exception { BucketCollector bucketCollector = MultiBucketCollector.wrap(true, List.of(maxAggregator, countAggregator, termsAggregator)); bucketCollector.preCollection(); - indexSearcher.search(new MatchAllDocsQuery(), bucketCollector.asCollector()); + context.searcher().search(new MatchAllDocsQuery(), bucketCollector.asCollector()); bucketCollector.postCollection(); Max max = (Max) maxAggregator.buildTopLevel(); @@ -840,12 +834,11 @@ public void testCacheAggregation() throws IOException { indexWriter.close(); DirectoryReader indexReader = DirectoryReader.open(directory); - IndexSearcher indexSearcher = newIndexSearcher(indexReader); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max").field("value"); - Max max = searchAndReduce(indexSearcher, new AggTestConfig(aggregationBuilder, fieldType)); + Max max = searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder, fieldType)); assertEquals(10.0, max.value(), 0); assertEquals("max", max.getName()); @@ -870,13 +863,12 @@ public void testScriptCaching() throws Exception { indexWriter.close(); DirectoryReader indexReader = DirectoryReader.open(directory); - IndexSearcher indexSearcher = newIndexSearcher(indexReader); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max").field("value") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, Collections.emptyMap())); - Max max = searchAndReduce(indexSearcher, new AggTestConfig(aggregationBuilder, fieldType)); + Max max = searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder, fieldType)); assertEquals(10.0, max.value(), 0); assertEquals("max", max.getName()); @@ -884,7 +876,7 @@ public void testScriptCaching() throws Exception { aggregationBuilder = new MaxAggregationBuilder("max").field("value") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, RANDOM_SCRIPT, Collections.emptyMap())); - max = searchAndReduce(indexSearcher, new AggTestConfig(aggregationBuilder, fieldType).withShouldBeCached(false)); + 
max = searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder, fieldType).withShouldBeCached(false)); assertTrue(max.value() >= 0.0); assertTrue(max.value() <= 1.0); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java index dacd673687cb5..def58da97c7ca 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java @@ -27,7 +27,6 @@ import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.Term; import org.apache.lucene.search.FieldExistsQuery; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; @@ -368,9 +367,7 @@ public void testSingleValuedFieldPartiallyUnmappedWithMissing() throws IOExcepti indexWriter.close(); try (DirectoryReader indexReader = DirectoryReader.open(directory)) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); - - Min min = searchAndReduce(indexSearcher, new AggTestConfig(aggregationBuilder, fieldType)); + Min min = searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder, fieldType)); assertEquals(-19.0, min.value(), 0); assertTrue(AggregationInspectionHelper.hasValue(min)); } @@ -557,9 +554,7 @@ public void testCaching() throws IOException { indexWriter.close(); try (DirectoryReader indexReader = DirectoryReader.open(directory)) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); - - try (AggregationContext context = createAggregationContext(indexSearcher, new MatchAllDocsQuery(), fieldType)) { + try (AggregationContext context = createAggregationContext(indexReader, new MatchAllDocsQuery(), fieldType)) { createAggregator(aggregationBuilder, context); assertTrue(context.isCacheable()); } @@ -584,14 +579,12 @@ public void testScriptCaching() throws IOException { indexWriter.close(); try (DirectoryReader indexReader = DirectoryReader.open(directory)) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); - - try (AggregationContext context = createAggregationContext(indexSearcher, new MatchAllDocsQuery(), fieldType)) { + try (AggregationContext context = createAggregationContext(indexReader, new MatchAllDocsQuery(), fieldType)) { createAggregator(nonDeterministicAggregationBuilder, context); assertFalse(context.isCacheable()); } - try (AggregationContext context = createAggregationContext(indexSearcher, new MatchAllDocsQuery(), fieldType)) { + try (AggregationContext context = createAggregationContext(indexReader, new MatchAllDocsQuery(), fieldType)) { createAggregator(aggregationBuilder, context); assertTrue(context.isCacheable()); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java index d93c70dde398b..26b7945434c1b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java @@ -286,7 +286,7 @@ public void testNoDocs() throws IOException { try (DirectoryReader indexReader = DirectoryReader.open(directory)) { ScriptedMetricAggregationBuilder aggregationBuilder = new 
ScriptedMetricAggregationBuilder(AGG_NAME); aggregationBuilder.mapScript(MAP_SCRIPT).combineScript(COMBINE_SCRIPT_NOOP).reduceScript(REDUCE_SCRIPT); - ScriptedMetric scriptedMetric = searchAndReduce(newIndexSearcher(indexReader), new AggTestConfig(aggregationBuilder)); + ScriptedMetric scriptedMetric = searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder)); assertEquals(AGG_NAME, scriptedMetric.getName()); assertNotNull(scriptedMetric.aggregation()); assertEquals(0, scriptedMetric.aggregation()); @@ -306,7 +306,7 @@ public void testScriptedMetricWithoutCombine() throws IOException { ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME); aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT).reduceScript(REDUCE_SCRIPT); IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> { - searchAndReduce(newIndexSearcher(indexReader), new AggTestConfig(aggregationBuilder)); + searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder)); }); assertEquals(exception.getMessage(), "[combineScript] must not be null: [scriptedMetric]"); } @@ -325,7 +325,7 @@ public void testScriptedMetricWithoutReduce() throws IOException { ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME); aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT).combineScript(COMBINE_SCRIPT); IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> { - searchAndReduce(newIndexSearcher(indexReader), new AggTestConfig(aggregationBuilder)); + searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder)); }); assertEquals(exception.getMessage(), "[reduceScript] must not be null: [scriptedMetric]"); } @@ -346,7 +346,7 @@ public void testScriptedMetricWithCombine() throws IOException { try (DirectoryReader indexReader = DirectoryReader.open(directory)) { ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME); aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT).combineScript(COMBINE_SCRIPT).reduceScript(REDUCE_SCRIPT); - ScriptedMetric scriptedMetric = searchAndReduce(newIndexSearcher(indexReader), new AggTestConfig(aggregationBuilder)); + ScriptedMetric scriptedMetric = searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder)); assertEquals(AGG_NAME, scriptedMetric.getName()); assertNotNull(scriptedMetric.aggregation()); assertEquals(numDocs, scriptedMetric.aggregation()); @@ -371,7 +371,7 @@ public void testScriptedMetricWithCombineAccessesScores() throws IOException { .mapScript(MAP_SCRIPT_SCORE) .combineScript(COMBINE_SCRIPT_SCORE) .reduceScript(REDUCE_SCRIPT); - ScriptedMetric scriptedMetric = searchAndReduce(newIndexSearcher(indexReader), new AggTestConfig(aggregationBuilder)); + ScriptedMetric scriptedMetric = searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder)); assertEquals(AGG_NAME, scriptedMetric.getName()); assertNotNull(scriptedMetric.aggregation()); // all documents have score of 1.0 @@ -396,7 +396,7 @@ public void testScriptParamsPassedThrough() throws IOException { .mapScript(MAP_SCRIPT_PARAMS) .combineScript(COMBINE_SCRIPT_PARAMS) .reduceScript(REDUCE_SCRIPT); - ScriptedMetric scriptedMetric = searchAndReduce(newIndexSearcher(indexReader), new AggTestConfig(aggregationBuilder)); + ScriptedMetric scriptedMetric = searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder)); // The result value depends on the script params. 
assertEquals(4896, scriptedMetric.aggregation()); @@ -423,10 +423,7 @@ public void testAggParamsPassedToReduceScript() throws IOException { .mapScript(MAP_SCRIPT_PARAMS) .combineScript(COMBINE_SCRIPT_PARAMS) .reduceScript(REDUCE_SCRIPT_PARAMS); - ScriptedMetric scriptedMetric = searchAndReduce( - newIndexSearcher(indexReader), - new AggTestConfig(aggregationBuilder).withMaxBuckets(0) - ); + ScriptedMetric scriptedMetric = searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder).withMaxBuckets(0)); // The result value depends on the script params. assertEquals(4803, scriptedMetric.aggregation()); @@ -452,7 +449,7 @@ public void testConflictingAggAndScriptParams() throws IOException { .reduceScript(REDUCE_SCRIPT); IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> { - searchAndReduce(newIndexSearcher(indexReader), new AggTestConfig(aggregationBuilder)); + searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder)); }); assertEquals( "Parameter name \"" + CONFLICTING_PARAM_NAME + "\" used in both aggregation and script parameters", @@ -475,7 +472,7 @@ public void testSelfReferencingAggStateAfterInit() throws IOException { .reduceScript(REDUCE_SCRIPT); IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> { - searchAndReduce(newIndexSearcher(indexReader), new AggTestConfig(aggregationBuilder)); + searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder)); }); assertEquals("Iterable object is self-referencing itself (Scripted metric aggs init script)", ex.getMessage()); } @@ -498,7 +495,7 @@ public void testSelfReferencingAggStateAfterMap() throws IOException { .reduceScript(REDUCE_SCRIPT); IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> { - searchAndReduce(newIndexSearcher(indexReader), new AggTestConfig(aggregationBuilder)); + searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder)); }); assertEquals("Iterable object is self-referencing itself (Scripted metric aggs map script)", ex.getMessage()); } @@ -518,7 +515,7 @@ public void testSelfReferencingAggStateAfterCombine() throws IOException { .reduceScript(REDUCE_SCRIPT); IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> { - searchAndReduce(newIndexSearcher(indexReader), new AggTestConfig(aggregationBuilder)); + searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder)); }); assertEquals("Iterable object is self-referencing itself (Scripted metric aggs combine script)", ex.getMessage()); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorTests.java index b6931819613a2..ba6b272ae3263 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorTests.java @@ -13,7 +13,6 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.MultiReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.util.NumericUtils; @@ -220,9 +219,7 @@ public void testPartiallyUnmapped() throws IOException { IndexReader unmappedReader = unmappedWriter.getReader(); MultiReader multiReader = new MultiReader(mappedReader, unmappedReader) ) { - - final 
IndexSearcher searcher = newSearcher(multiReader); - final InternalStats stats = searchAndReduce(searcher, new AggTestConfig(builder, ft)); + final InternalStats stats = searchAndReduce(multiReader, new AggTestConfig(builder, ft)); assertEquals(expected.count, stats.getCount(), 0); assertEquals(expected.sum, stats.getSum(), TOLERANCE); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java index 2fc431cf86323..04f316ae15452 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java @@ -12,7 +12,6 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.MultiReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.util.NumericUtils; @@ -54,8 +53,7 @@ public void testEmpty() throws IOException { .method(PercentilesMethod.TDIGEST); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); try (IndexReader reader = new MultiReader()) { - IndexSearcher searcher = newSearcher(reader); - PercentileRanks ranks = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + PercentileRanks ranks = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); Percentile rank = ranks.iterator().next(); assertEquals(Double.NaN, rank.getPercent(), 0d); assertEquals(0.5, rank.getValue(), 0d); @@ -76,8 +74,7 @@ public void testSimple() throws IOException { .method(PercentilesMethod.TDIGEST); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - PercentileRanks ranks = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + PercentileRanks ranks = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); Iterator<Percentile> rankIterator = ranks.iterator(); Percentile rank = rankIterator.next(); assertEquals(0.1, rank.getValue(), 0d); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java index 929795fe35c78..7c4194e0db873 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java @@ -19,7 +19,6 @@ import org.apache.lucene.queryparser.classic.QueryParser; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; @@ -73,7 +72,6 @@ public void testNoResults() throws Exception { * Tests {@code top_hits} inside of {@code terms}. While not strictly a unit test, this is a fairly common way to run {@code top_hits} * and serves as a good example of running {@code top_hits} inside of another aggregation. 
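* As a rough sketch (the builder and field names here are illustrative, not the exact ones this test uses), such a
* tree is assembled from the ordinary builders:
* <pre>{@code
* AggregationBuilder aggregation = new TermsAggregationBuilder("terms").field("string")
*     .subAggregation(new TopHitsAggregationBuilder("top_hits").sort("string", SortOrder.DESC));
* }</pre>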
*/ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/98892") public void testInsideTerms() throws Exception { Aggregation result; if (randomBoolean()) { @@ -126,10 +124,7 @@ private Aggregation testCase(Query query, AggregationBuilder builder) throws IOE iw.close(); IndexReader indexReader = DirectoryReader.open(directory); - // We do not use LuceneTestCase.newSearcher because we need a DirectoryReader for "testInsideTerms" - IndexSearcher indexSearcher = newSearcher(indexReader); - - Aggregation result = searchAndReduce(indexSearcher, new AggTestConfig(builder, STRING_FIELD_TYPE).withQuery(query)); + Aggregation result = searchAndReduce(indexReader, new AggTestConfig(builder, STRING_FIELD_TYPE).withQuery(query)); indexReader.close(); directory.close(); return result; @@ -178,12 +173,11 @@ public void testSetScorer() throws Exception { IndexReader reader = DirectoryReader.open(w); w.close(); - IndexSearcher searcher = newSearcher(reader); Query query = new BooleanQuery.Builder().add(new TermQuery(new Term("string", "bar")), Occur.SHOULD) .add(new TermQuery(new Term("string", "baz")), Occur.SHOULD) .build(); AggregationBuilder agg = AggregationBuilders.topHits("top_hits"); - TopHits result = searchAndReduce(searcher, new AggTestConfig(agg, STRING_FIELD_TYPE).withQuery(query)); + TopHits result = searchAndReduce(reader, new AggTestConfig(agg, STRING_FIELD_TYPE).withQuery(query)); assertEquals(3, result.getHits().getTotalHits().value); reader.close(); directory.close(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java index 3bd9f96eb5df2..05fcb45c71ee9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java @@ -13,7 +13,6 @@ import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; @@ -104,18 +103,16 @@ public void testSameAggNames() throws IOException { InternalAvg avgResult; InternalDateHistogram histogramResult; try (DirectoryReader indexReader = DirectoryReader.open(directory)) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); - DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(DATE_FIELD); MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD, NumberFieldMapper.NumberType.LONG); avgResult = searchAndReduce( - indexSearcher, + indexReader, new AggTestConfig(avgBuilder, fieldType, valueFieldType).withMaxBuckets(10000).withQuery(query) ); histogramResult = searchAndReduce( - indexSearcher, + indexReader, new AggTestConfig(histo, fieldType, valueFieldType).withMaxBuckets(10000).withQuery(query) ); } @@ -165,14 +162,12 @@ public void testComplicatedBucketPath() throws IOException { InternalFilter filterResult; try (DirectoryReader indexReader = DirectoryReader.open(directory)) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); - DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(DATE_FIELD); MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD, 
NumberFieldMapper.NumberType.LONG); MappedFieldType keywordField = keywordField(textField); filterResult = searchAndReduce( - indexSearcher, + indexReader, new AggTestConfig(filterAggregationBuilder, fieldType, valueFieldType, keywordField).withQuery(query) ); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptAggregatorTests.java index c0b042a41d927..13a33f1944a88 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptAggregatorTests.java @@ -12,7 +12,6 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; @@ -109,10 +108,8 @@ private void testCase( indexWriter.close(); try (DirectoryReader indexReader = DirectoryReader.open(directory)) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); - InternalFilters filters; - filters = searchAndReduce(indexSearcher, new AggTestConfig(aggregationBuilder, fieldType).withQuery(query)); + filters = searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder, fieldType).withQuery(query)); verify.accept(filters); } } diff --git a/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java b/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java index 22bb64d1fa86b..879d7555d6cfe 100644 --- a/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java +++ b/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java @@ -18,10 +18,12 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TransportVersionUtils; +import org.elasticsearch.transport.InboundDecoder.ChannelType; import java.io.IOException; import java.util.ArrayList; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasItems; import static org.hamcrest.Matchers.instanceOf; @@ -218,6 +220,94 @@ public void testDecodeHandshakeCompatibility() throws IOException { } + public void testClientChannelTypeFailsDecodingRequests() throws Exception { + String action = "test-request"; + long requestId = randomNonNegativeLong(); + if (randomBoolean()) { + final String headerKey = randomAlphaOfLength(10); + final String headerValue = randomAlphaOfLength(20); + if (randomBoolean()) { + threadContext.putHeader(headerKey, headerValue); + } else { + threadContext.addResponseHeader(headerKey, headerValue); + } + } + // a request + OutboundMessage message = new OutboundMessage.Request( + threadContext, + new TestRequest(randomAlphaOfLength(100)), + TransportHandshaker.REQUEST_HANDSHAKE_VERSION, + action, + requestId, + randomBoolean(), + randomFrom(Compression.Scheme.DEFLATE, Compression.Scheme.LZ4, null) + ); + + try (RecyclerBytesStreamOutput os = new RecyclerBytesStreamOutput(recycler)) { + final BytesReference bytes = message.serialize(os); + try (InboundDecoder clientDecoder = new InboundDecoder(recycler, ChannelType.CLIENT)) { + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> 
clientDecoder.decode(ReleasableBytesReference.wrap(bytes), ignored -> {}) + ); + assertThat(e.getMessage(), containsString("client channels do not accept inbound requests, only responses")); + } + // the same message will be decoded by a server or mixed decoder + try (InboundDecoder decoder = new InboundDecoder(recycler, randomFrom(ChannelType.SERVER, ChannelType.MIX))) { + final ArrayList fragments = new ArrayList<>(); + int bytesConsumed = decoder.decode(ReleasableBytesReference.wrap(bytes), fragments::add); + int totalHeaderSize = TcpHeader.headerSize(TransportVersion.current()) + bytes.getInt( + TcpHeader.VARIABLE_HEADER_SIZE_POSITION + ); + assertEquals(totalHeaderSize, bytesConsumed); + final Header header = (Header) fragments.get(0); + assertEquals(requestId, header.getRequestId()); + } + } + } + + public void testServerChannelTypeFailsDecodingResponses() throws Exception { + long requestId = randomNonNegativeLong(); + if (randomBoolean()) { + final String headerKey = randomAlphaOfLength(10); + final String headerValue = randomAlphaOfLength(20); + if (randomBoolean()) { + threadContext.putHeader(headerKey, headerValue); + } else { + threadContext.addResponseHeader(headerKey, headerValue); + } + } + // a response + OutboundMessage message = new OutboundMessage.Response( + threadContext, + new TestResponse(randomAlphaOfLength(100)), + TransportHandshaker.REQUEST_HANDSHAKE_VERSION, + requestId, + randomBoolean(), + randomFrom(Compression.Scheme.DEFLATE, Compression.Scheme.LZ4, null) + ); + + try (RecyclerBytesStreamOutput os = new RecyclerBytesStreamOutput(recycler)) { + final BytesReference bytes = message.serialize(os); + try (InboundDecoder decoder = new InboundDecoder(recycler, ChannelType.SERVER)) { + final ReleasableBytesReference releasable1 = ReleasableBytesReference.wrap(bytes); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> decoder.decode(releasable1, ignored -> {})); + assertThat(e.getMessage(), containsString("server channels do not accept inbound responses, only requests")); + } + // the same message will be decoded by a client or mixed decoder + try (InboundDecoder decoder = new InboundDecoder(recycler, randomFrom(ChannelType.CLIENT, ChannelType.MIX))) { + final ArrayList fragments = new ArrayList<>(); + int bytesConsumed = decoder.decode(ReleasableBytesReference.wrap(bytes), fragments::add); + int totalHeaderSize = TcpHeader.headerSize(TransportVersion.current()) + bytes.getInt( + TcpHeader.VARIABLE_HEADER_SIZE_POSITION + ); + assertEquals(totalHeaderSize, bytesConsumed); + final Header header = (Header) fragments.get(0); + assertEquals(requestId, header.getRequestId()); + } + } + } + public void testCompressedDecode() throws IOException { boolean isRequest = randomBoolean(); String action = "test-request"; diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index c70f5ad9dfed4..eee0c1b05cdc8 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -28,6 +28,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryCachingPolicy; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Sort; import 
org.apache.lucene.search.SortField; @@ -249,10 +250,10 @@ protected List<SearchPlugin> getSearchPlugins() { @Deprecated protected <A extends Aggregator> A createAggregator( AggregationBuilder aggregationBuilder, - IndexSearcher searcher, + IndexReader indexReader, MappedFieldType... fieldTypes ) throws IOException { - return createAggregator(aggregationBuilder, createAggregationContext(searcher, new MatchAllDocsQuery(), fieldTypes)); + return createAggregator(aggregationBuilder, createAggregationContext(indexReader, new MatchAllDocsQuery(), fieldTypes)); } protected <A extends Aggregator> A createAggregator(AggregationBuilder aggregationBuilder, AggregationContext context) @@ -282,7 +283,21 @@ protected <A extends Aggregator> A createAggregator(AggregatorFactories.Builder * Deprecated - this will be made private in a future update */ @Deprecated - protected AggregationContext createAggregationContext(IndexSearcher indexSearcher, Query query, MappedFieldType... fieldTypes) + protected AggregationContext createAggregationContext(IndexReader indexReader, Query query, MappedFieldType... fieldTypes) + throws IOException { + return createAggregationContext( + indexReader, + createIndexSettings(), + query, + new NoneCircuitBreakerService(), + AggregationBuilder.DEFAULT_PREALLOCATION * 5, // We don't know how many bytes to preallocate so we grab a handful + DEFAULT_MAX_BUCKETS, + false, + fieldTypes + ); + } + + private AggregationContext createAggregationContext(IndexSearcher indexSearcher, Query query, MappedFieldType... fieldTypes) throws IOException { return createAggregationContext( indexSearcher, @@ -306,7 +321,29 @@ protected AggregationContext createAggregationContext(IndexSearcher indexSearche */ @Deprecated protected AggregationContext createAggregationContext( - IndexSearcher indexSearcher, + IndexReader indexReader, + IndexSettings indexSettings, + Query query, + CircuitBreakerService breakerService, + long bytesToPreallocate, + int maxBucket, + boolean isInSortOrderExecutionRequired, + MappedFieldType... fieldTypes + ) throws IOException { + return createAggregationContext( + newIndexSearcher(indexReader), + indexSettings, + query, + breakerService, + bytesToPreallocate, + maxBucket, + isInSortOrderExecutionRequired, + fieldTypes + ); + } + + private AggregationContext createAggregationContext( + IndexSearcher searcher, IndexSettings indexSettings, Query query, CircuitBreakerService breakerService, @@ -353,7 +390,7 @@ public void onCache(ShardId shardId, Accountable accountable) {} parserConfig(), writableRegistry(), null, - indexSearcher, + searcher, System::currentTimeMillis, null, null, @@ -470,8 +507,9 @@ protected ScriptService getMockScriptService() { * It runs the aggregation as well using a circuit breaker that randomly throws {@link CircuitBreakingException} * in order to make sure the implementation does not leak.
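* As a rough usage sketch after this change (the builder, the "number" field, and {@code fieldType} are
* illustrative, not prescribed by the framework):
* <pre>{@code
* try (DirectoryReader reader = DirectoryReader.open(directory)) {
*     Min min = searchAndReduce(reader, new AggTestConfig(new MinAggregationBuilder("min").field("number"), fieldType));
* }
* }</pre>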
*/ - protected <A extends InternalAggregation> A searchAndReduce(IndexSearcher searcher, AggTestConfig aggTestConfig) + protected <A extends InternalAggregation> A searchAndReduce(IndexReader reader, AggTestConfig aggTestConfig) throws IOException { + IndexSearcher searcher = newIndexSearcher(reader); IndexSettings indexSettings = createIndexSettings(); // First run it to find circuit breaker leaks on the aggregator runWithCrankyCircuitBreaker(indexSettings, searcher, aggTestConfig); @@ -694,9 +732,7 @@ protected <T extends AggregationBuilder, V extends InternalAggregation> void tes DirectoryReader unwrapped = DirectoryReader.open(directory); DirectoryReader indexReader = wrapDirectoryReader(unwrapped) ) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); - - V agg = searchAndReduce(indexSearcher, aggTestConfig); + V agg = searchAndReduce(indexReader, aggTestConfig); verify.accept(agg); verifyOutputFieldNames(aggTestConfig.builder(), agg); @@ -704,10 +740,8 @@ protected <T extends AggregationBuilder, V extends InternalAggregation> void tes } } - protected void withIndex( - CheckedConsumer<RandomIndexWriter, IOException> buildIndex, - CheckedConsumer<IndexSearcher, IOException> consume - ) throws IOException { + protected void withIndex(CheckedConsumer<RandomIndexWriter, IOException> buildIndex, CheckedConsumer<DirectoryReader, IOException> consume) + throws IOException { try (Directory directory = newDirectory()) { RandomIndexWriter iw = new RandomIndexWriter(random(), directory); buildIndex.accept(iw); @@ -716,14 +750,14 @@ protected void withIndex( DirectoryReader unwrapped = DirectoryReader.open(directory); DirectoryReader indexReader = wrapDirectoryReader(unwrapped) ) { - consume.accept(newIndexSearcher(indexReader)); + consume.accept(indexReader); } } } protected void withNonMergingIndex( CheckedConsumer<RandomIndexWriter, IOException> buildIndex, - CheckedConsumer<IndexSearcher, IOException> consume + CheckedConsumer<DirectoryReader, IOException> consume ) throws IOException { try (Directory directory = newDirectory()) { RandomIndexWriter iw = new RandomIndexWriter( @@ -737,7 +771,7 @@ protected void withNonMergingIndex( DirectoryReader unwrapped = DirectoryReader.open(directory); DirectoryReader indexReader = wrapDirectoryReader(unwrapped) ) { - consume.accept(newIndexSearcher(indexReader)); + consume.accept(indexReader); } } } @@ -755,7 +789,7 @@ protected <R extends InternalAggregation> void debugTestCase( TriConsumer<R, Class<? extends Aggregator>, Map<String, Map<String, Object>>> verify, MappedFieldType... fieldTypes ) throws IOException { - withIndex(buildIndex, searcher -> debugTestCase(builder, query, searcher, verify, fieldTypes)); + withIndex(buildIndex, reader -> debugTestCase(builder, query, reader, verify, null, fieldTypes)); } /** @@ -767,11 +801,16 @@ protected <R extends InternalAggregation> void debugTestCase( protected <R extends InternalAggregation> void debugTestCase( AggregationBuilder aggregationBuilder, Query query, - IndexSearcher searcher, + IndexReader reader, TriConsumer<R, Class<? extends Aggregator>, Map<String, Map<String, Object>>> verify, + QueryCachingPolicy queryCachingPolicy, MappedFieldType... fieldTypes ) throws IOException { // Don't use searchAndReduce because we only want a single aggregator. + IndexSearcher searcher = newIndexSearcher(reader); + if (queryCachingPolicy != null) { + searcher.setQueryCachingPolicy(queryCachingPolicy); + } CircuitBreakerService breakerService = new NoneCircuitBreakerService(); AggregationContext context = createAggregationContext( searcher, @@ -915,16 +954,15 @@ protected static DirectoryReader wrapInMockESDirectoryReader(DirectoryReader dir } /** - * Creates a {@link ContextIndexSearcher} that supports concurrency running each segment in a different thread. It randomly - * sets the IndexSearcher to run on concurrent mode. + * Creates a {@link ContextIndexSearcher} that supports concurrency, running each segment in a different thread. 
*/ - protected IndexSearcher newIndexSearcher(DirectoryReader indexReader) throws IOException { + private IndexSearcher newIndexSearcher(IndexReader indexReader) throws IOException { return new ContextIndexSearcher( indexReader, IndexSearcher.getDefaultSimilarity(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), - randomBoolean(), + indexReader instanceof DirectoryReader ? randomBoolean() : false, // we can only wrap DirectoryReader instances this.threadPoolExecutor, this.threadPoolExecutor.getMaximumPoolSize(), 1 // forces multiple slices @@ -1042,7 +1080,6 @@ public void testSupportedFieldTypes() throws IOException { indexWriter.close(); try (DirectoryReader indexReader = DirectoryReader.open(directory)) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); AggregationBuilder aggregationBuilder = createAggBuilderForTypeTest(fieldType, fieldName); ValuesSourceType vst = fieldToVST(fieldType); @@ -1050,7 +1087,7 @@ public void testSupportedFieldTypes() throws IOException { AssertionError failure = null; try { InternalAggregation internalAggregation = searchAndReduce( - indexSearcher, + indexReader, new AggTestConfig(aggregationBuilder, fieldType) ); // We should make sure if the builder says it supports sampling, that the internal aggregations returned override diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index 6ffe78ba493a2..144eb35d3f526 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -297,7 +297,7 @@ public void tearDown() throws Exception { } } - public void assertNumHandshakes(long expected, Transport transport) { + public static void assertNumHandshakes(long expected, Transport transport) { if (transport instanceof TcpTransport) { assertEquals(expected, ((TcpTransport) transport).getNumHandshakes()); } diff --git a/test/framework/src/main/java/org/elasticsearch/transport/TestOutboundRequestMessage.java b/test/framework/src/main/java/org/elasticsearch/transport/TestOutboundRequestMessage.java new file mode 100644 index 0000000000000..bdef107cec4e4 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/transport/TestOutboundRequestMessage.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.transport; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.RecyclerBytesStreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.util.concurrent.ThreadContext; + +import java.io.IOException; + +public class TestOutboundRequestMessage extends OutboundMessage.Request { + public TestOutboundRequestMessage( + ThreadContext threadContext, + Writeable message, + TransportVersion version, + String action, + long requestId, + boolean isHandshake, + Compression.Scheme compressionScheme + ) { + super(threadContext, message, version, action, requestId, isHandshake, compressionScheme); + + } + + @Override + public BytesReference serialize(RecyclerBytesStreamOutput bytesStream) throws IOException { + return super.serialize(bytesStream); + } +} diff --git a/x-pack/docs/en/security/configuring-stack-security.asciidoc b/x-pack/docs/en/security/configuring-stack-security.asciidoc index 7524fc3e70f45..9ecce465ae558 100644 --- a/x-pack/docs/en/security/configuring-stack-security.asciidoc +++ b/x-pack/docs/en/security/configuring-stack-security.asciidoc @@ -33,20 +33,17 @@ the `kibana` package distribution for your environment. [[stack-start-with-security]] === Start {es} and enroll {kib} with security enabled -. From the installation directory, start {es}. A password is generated for the -`elastic` user and output to the terminal, plus an enrollment token for -enrolling {kib}. +. From the installation directory, start {es}. + [source,shell] ---- bin/elasticsearch ---- + -TIP: You might need to scroll back a bit in the terminal to view the password -and enrollment token. +The command prints the `elastic` user password and an enrollment token for {kib}. -. Copy the generated password and enrollment token and save them in a secure -location. These values are shown only when you start {es} for the first time. +. Copy the generated `elastic` password and enrollment token. These credentials +are only shown when you start {es} for the first time. + [NOTE] ==== @@ -56,14 +53,20 @@ To generate new enrollment tokens for {kib} or {es} nodes, run the <> tool. These tools are available in the {es} `bin` directory. ==== ++ +We recommend storing the `elastic` password as an environment variable in your shell. Example: ++ +[source,sh] +---- +export ELASTIC_PASSWORD="your_password" +---- . (Optional) Open a new terminal and verify that you can connect to your {es} -cluster by making an authenticated call. Enter the password for the `elastic` -user when prompted: +cluster by making an authenticated call. 
+ [source,shell] ---- -curl --cacert config/certs/http_ca.crt -u elastic https://localhost:9200 +curl --cacert config/certs/http_ca.crt -u elastic:$ELASTIC_PASSWORD https://localhost:9200 ---- // NOTCONSOLE diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/bucket/histogram/HistoBackedHistogramAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/bucket/histogram/HistoBackedHistogramAggregatorTests.java index 56cd0a2dae2ea..82dd240bfc5f6 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/bucket/histogram/HistoBackedHistogramAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/bucket/histogram/HistoBackedHistogramAggregatorTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.analytics.aggregations.bucket.histogram; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.index.mapper.CustomTermFreqField; @@ -42,8 +41,7 @@ public void testHistograms() throws Exception { HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field(FIELD_NAME).interval(5); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new AggTestConfig(aggBuilder, defaultFieldType(FIELD_NAME))); + InternalHistogram histogram = searchAndReduce(reader, new AggTestConfig(aggBuilder, defaultFieldType(FIELD_NAME))); assertEquals(9, histogram.getBuckets().size()); assertEquals(-10d, histogram.getBuckets().get(0).getKey()); assertEquals(1, histogram.getBuckets().get(0).getDocCount()); @@ -76,8 +74,7 @@ public void testMinDocCount() throws Exception { HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field(FIELD_NAME).interval(5).minDocCount(2); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new AggTestConfig(aggBuilder, defaultFieldType(FIELD_NAME))); + InternalHistogram histogram = searchAndReduce(reader, new AggTestConfig(aggBuilder, defaultFieldType(FIELD_NAME))); assertEquals(4, histogram.getBuckets().size()); assertEquals(0d, histogram.getBuckets().get(0).getKey()); assertEquals(3, histogram.getBuckets().get(0).getDocCount()); @@ -105,8 +102,7 @@ public void testHistogramWithDocCountField() throws Exception { HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field(FIELD_NAME).interval(100); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new AggTestConfig(aggBuilder, defaultFieldType(FIELD_NAME))); + InternalHistogram histogram = searchAndReduce(reader, new AggTestConfig(aggBuilder, defaultFieldType(FIELD_NAME))); assertTrue(AggregationInspectionHelper.hasValue(histogram)); assertEquals(8, histogram.getBuckets().get(0).getDocCount()); } @@ -127,8 +123,7 @@ public void testRandomOffset() throws Exception { .offset(offset) .minDocCount(1); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new AggTestConfig(aggBuilder, defaultFieldType(FIELD_NAME))); + InternalHistogram 
histogram = searchAndReduce(reader, new AggTestConfig(aggBuilder, defaultFieldType(FIELD_NAME))); assertEquals(3, histogram.getBuckets().size()); assertEquals(-10 + expectedOffset, histogram.getBuckets().get(0).getKey()); @@ -155,8 +150,7 @@ public void testExtendedBounds() throws Exception { .interval(5) .extendedBounds(-12, 13); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new AggTestConfig(aggBuilder, defaultFieldType(FIELD_NAME))); + InternalHistogram histogram = searchAndReduce(reader, new AggTestConfig(aggBuilder, defaultFieldType(FIELD_NAME))); assertEquals(6, histogram.getBuckets().size()); assertEquals(-15d, histogram.getBuckets().get(0).getKey()); assertEquals(0, histogram.getBuckets().get(0).getDocCount()); @@ -187,8 +181,7 @@ public void testHardBounds() throws Exception { .interval(5) .hardBounds(new DoubleBounds(0.0, 5.0)); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new AggTestConfig(aggBuilder, defaultFieldType(FIELD_NAME))); + InternalHistogram histogram = searchAndReduce(reader, new AggTestConfig(aggBuilder, defaultFieldType(FIELD_NAME))); assertEquals(1, histogram.getBuckets().size()); assertEquals(0d, histogram.getBuckets().get(0).getKey()); assertEquals(4, histogram.getBuckets().get(0).getDocCount()); @@ -211,10 +204,8 @@ public void testSubAggs() throws Exception { .extendedBounds(-12, 13) .subAggregation(new TopHitsAggregationBuilder("top_hits")); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { - searchAndReduce(searcher, new AggTestConfig(aggBuilder, defaultFieldType(FIELD_NAME))); + searchAndReduce(reader, new AggTestConfig(aggBuilder, defaultFieldType(FIELD_NAME))); }); assertEquals("Histogram aggregation on histogram fields does not support sub-aggregations", e.getMessage()); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/bucket/range/HistoBackedRangeAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/bucket/range/HistoBackedRangeAggregatorTests.java index 27fa350d11763..b6a198a8206e8 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/bucket/range/HistoBackedRangeAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/bucket/range/HistoBackedRangeAggregatorTests.java @@ -10,7 +10,6 @@ import org.apache.lucene.document.DoubleDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.index.mapper.CustomTermFreqField; @@ -65,16 +64,15 @@ public void testPercentilesAccuracy() throws Exception { .percentiles(steps); try (DirectoryReader reader = DirectoryReader.open(dir)) { - IndexSearcher searcher = newIndexSearcher(reader); RangeAggregationBuilder aggBuilder = new RangeAggregationBuilder("my_agg").field(HISTO_FIELD_NAME); RangeAggregationBuilder rawFieldAgg = new RangeAggregationBuilder("my_agg").field(RAW_FIELD_NAME); Percentiles rawPercentileResults = searchAndReduce( - searcher, + reader, new 
AggTestConfig(rawPercentiles, defaultFieldType(RAW_FIELD_NAME)) ); Percentiles aggregatedPercentileResults = searchAndReduce( - searcher, + reader, new AggTestConfig(aggregatedPercentiles, defaultFieldType(HISTO_FIELD_NAME)) ); aggBuilder.addUnboundedTo(aggregatedPercentileResults.percentile(steps[0])); @@ -91,11 +89,11 @@ public void testPercentilesAccuracy() throws Exception { rawFieldAgg.addUnboundedFrom(rawPercentileResults.percentile(steps[steps.length - 1])); InternalRange range = searchAndReduce( - searcher, + reader, new AggTestConfig(aggBuilder, defaultFieldType(HISTO_FIELD_NAME)) ); InternalRange rawRange = searchAndReduce( - searcher, + reader, new AggTestConfig(rawFieldAgg, defaultFieldType(RAW_FIELD_NAME)) ); for (int j = 0; j < rawRange.getBuckets().size(); j++) { @@ -171,7 +169,6 @@ private void testRanges(List ranges, String name) throws docCount += generateDocs(w); } try (DirectoryReader reader = DirectoryReader.open(dir)) { - IndexSearcher searcher = newIndexSearcher(reader); RangeAggregationBuilder aggBuilder = new RangeAggregationBuilder("my_agg").field(HISTO_FIELD_NAME); RangeAggregationBuilder rawFieldAgg = new RangeAggregationBuilder("my_agg").field(RAW_FIELD_NAME); ranges.forEach(r -> { @@ -180,11 +177,11 @@ private void testRanges(List ranges, String name) throws }); InternalRange range = searchAndReduce( - searcher, + reader, new AggTestConfig(aggBuilder, defaultFieldType(HISTO_FIELD_NAME)) ); InternalRange rawRange = searchAndReduce( - searcher, + reader, new AggTestConfig(rawFieldAgg, defaultFieldType(RAW_FIELD_NAME)) ); for (int j = 0; j < rawRange.getBuckets().size(); j++) { @@ -227,9 +224,8 @@ public void testOverlapping() throws Exception { .addRange(10, 20) .addUnboundedFrom(20); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); InternalRange range = searchAndReduce( - searcher, + reader, new AggTestConfig(aggBuilder, defaultFieldType(HISTO_FIELD_NAME)) ); assertTrue(AggregationInspectionHelper.hasValue(range)); @@ -287,9 +283,8 @@ public void testNonOverlapping() throws Exception { .addRange(10, 20) .addUnboundedFrom(20); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); InternalRange range = searchAndReduce( - searcher, + reader, new AggTestConfig(aggBuilder, defaultFieldType(HISTO_FIELD_NAME)) ); assertTrue(AggregationInspectionHelper.hasValue(range)); @@ -320,9 +315,8 @@ public void testSubAggs() throws Exception { .addRange(-1.0, 3.0) .subAggregation(new TopHitsAggregationBuilder("top_hits")); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { - searchAndReduce(searcher, new AggTestConfig(aggBuilder, defaultFieldType(HISTO_FIELD_NAME))); + searchAndReduce(reader, new AggTestConfig(aggBuilder, defaultFieldType(HISTO_FIELD_NAME))); }); assertEquals("Range aggregation on histogram fields does not support sub-aggregations", e.getMessage()); } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HDRPreAggregatedPercentileRanksAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HDRPreAggregatedPercentileRanksAggregatorTests.java index 1a7458f011489..7f9992fe5fd2e 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HDRPreAggregatedPercentileRanksAggregatorTests.java +++ 
b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HDRPreAggregatedPercentileRanksAggregatorTests.java @@ -11,7 +11,6 @@ import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.Document; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -91,8 +90,7 @@ public void testSimple() throws IOException { .method(PercentilesMethod.HDR); MappedFieldType fieldType = new HistogramFieldMapper.HistogramFieldType("field", Collections.emptyMap()); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - PercentileRanks ranks = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + PercentileRanks ranks = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); Iterator rankIterator = ranks.iterator(); Percentile rank = rankIterator.next(); assertEquals(0.1, rank.getValue(), 0d); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/TDigestPreAggregatedPercentileRanksAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/TDigestPreAggregatedPercentileRanksAggregatorTests.java index 718f4df63895f..c721bc088188d 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/TDigestPreAggregatedPercentileRanksAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/TDigestPreAggregatedPercentileRanksAggregatorTests.java @@ -78,7 +78,7 @@ public void testSimple() throws IOException { .method(PercentilesMethod.TDIGEST); MappedFieldType fieldType = new HistogramFieldMapper.HistogramFieldType("field", Collections.emptyMap()); try (IndexReader reader = w.getReader()) { - PercentileRanks ranks = searchAndReduce(newSearcher(reader), new AggTestConfig(aggBuilder, fieldType)); + PercentileRanks ranks = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); Iterator rankIterator = ranks.iterator(); Percentile rank = rankIterator.next(); assertEquals(0.1, rank.getValue(), 0d); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityAggregatorTests.java index 40c3f1ff44ca3..58d4c45ac1551 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityAggregatorTests.java @@ -10,7 +10,6 @@ import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; @@ -150,13 +149,11 @@ private void executeTestCase( } try (DirectoryReader indexReader = DirectoryReader.open(directory)) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); - DateFieldMapper.DateFieldType 
fieldType = new DateFieldMapper.DateFieldType(HISTO_FIELD); MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType("value_field", NumberFieldMapper.NumberType.LONG); InternalAggregation histogram; - histogram = searchAndReduce(indexSearcher, new AggTestConfig(aggBuilder, fieldType, valueFieldType).withQuery(query)); + histogram = searchAndReduce(indexReader, new AggTestConfig(aggBuilder, fieldType, valueFieldType).withQuery(query)); verify.accept(histogram); } } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesHDRAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesHDRAggregatorTests.java index 89319401d7788..7adddbaeec9d5 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesHDRAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesHDRAggregatorTests.java @@ -12,7 +12,6 @@ import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; @@ -63,14 +62,12 @@ protected void executeTestCase(int window, int shift, Query query, DateHistogram } try (DirectoryReader indexReader = DirectoryReader.open(directory)) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); - DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(aggBuilder.field()); MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType("value_field", NumberFieldMapper.NumberType.DOUBLE); InternalDateHistogram histogram; histogram = searchAndReduce( - indexSearcher, + indexReader, new AggTestConfig(aggBuilder, fieldType, valueFieldType).withMaxBuckets(1000).withQuery(query) ); for (int i = 0; i < histogram.getBuckets().size(); i++) { diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesTDigestAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesTDigestAggregatorTests.java index 7ab10a31464d2..c909dda3ea05b 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesTDigestAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesTDigestAggregatorTests.java @@ -11,7 +11,6 @@ import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; @@ -63,14 +62,12 @@ protected void executeTestCase(int window, int shift, Query query, DateHistogram } try (DirectoryReader indexReader = DirectoryReader.open(directory)) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); - DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(aggBuilder.field()); MappedFieldType valueFieldType = new 
NumberFieldMapper.NumberFieldType("value_field", NumberFieldMapper.NumberType.DOUBLE); InternalDateHistogram histogram; histogram = searchAndReduce( - indexSearcher, + indexReader, new AggTestConfig(aggBuilder, fieldType, valueFieldType).withMaxBuckets(1000).withQuery(query) ); for (int i = 0; i < histogram.getBuckets().size(); i++) { diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/normalize/NormalizeAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/normalize/NormalizeAggregatorTests.java index acdb1c34a0680..d6bd96600176a 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/normalize/NormalizeAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/normalize/NormalizeAggregatorTests.java @@ -12,7 +12,6 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; @@ -161,9 +160,8 @@ private void testCase(ValuesSourceAggregationBuilder aggBuilder, Consumer buckets = terms.getBuckets(); assertNotNull(buckets); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorTests.java index 6dec303e659ce..9abd4ca5faa59 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorTests.java @@ -19,7 +19,6 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.ConstantScoreQuery; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; @@ -370,11 +369,10 @@ public void testTonsOfBucketsTriggersBreaker() throws IOException { } try (DirectoryReader indexReader = DirectoryReader.open(directory)) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); TopMetricsAggregationBuilder builder = simpleBuilder(new FieldSortBuilder("s").order(SortOrder.ASC)); try ( AggregationContext context = createAggregationContext( - indexSearcher, + indexReader, createIndexSettings(), new MatchAllDocsQuery(), breaker, @@ -575,9 +573,8 @@ private InternalAggregation collect( } try (DirectoryReader indexReader = DirectoryReader.open(directory)) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); InternalAggregation agg = searchAndReduce( - indexSearcher, + indexReader, new AggTestConfig(builder, fields).withShouldBeCached(shouldBeCached).withQuery(query) ); verifyOutputFieldNames(builder, agg); diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPlugin.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPlugin.java index 9630dc0547d85..60cc7c847b5e3 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPlugin.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPlugin.java @@ -209,6 +209,7 @@ public Collection 
createComponents( EnrichPolicyExecutor enrichPolicyExecutor = new EnrichPolicyExecutor( settings, clusterService, + indicesService, client, threadPool, expressionResolver, diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutor.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutor.java index 011635783a2db..ecb03615307f9 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutor.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutor.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.indices.IndicesService; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; @@ -41,6 +42,7 @@ public class EnrichPolicyExecutor { public static final String TASK_ACTION = "policy_execution"; private final ClusterService clusterService; + private final IndicesService indicesService; private final Client client; private final ThreadPool threadPool; private final IndexNameExpressionResolver indexNameExpressionResolver; @@ -54,6 +56,7 @@ public class EnrichPolicyExecutor { public EnrichPolicyExecutor( Settings settings, ClusterService clusterService, + IndicesService indicesService, Client client, ThreadPool threadPool, IndexNameExpressionResolver indexNameExpressionResolver, @@ -61,6 +64,7 @@ public EnrichPolicyExecutor( LongSupplier nowSupplier ) { this.clusterService = clusterService; + this.indicesService = indicesService; this.client = client; this.threadPool = threadPool; this.indexNameExpressionResolver = indexNameExpressionResolver; @@ -215,6 +219,7 @@ private Runnable createPolicyRunner( task, listener, clusterService, + indicesService, client, indexNameExpressionResolver, enrichIndexName, diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunner.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunner.java index 9b452af99df8b..a3c3e65171d83 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunner.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunner.java @@ -33,19 +33,24 @@ import org.elasticsearch.client.internal.FilterClient; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MappingMetadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.util.iterable.Iterables; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.ReindexRequest; import org.elasticsearch.index.reindex.ScrollableHitSource; +import 
org.elasticsearch.indices.IndicesService; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.xcontent.XContentBuilder; @@ -84,6 +89,7 @@ public class EnrichPolicyRunner implements Runnable { private final ExecuteEnrichPolicyTask task; private final ActionListener<ExecuteEnrichPolicyStatus> listener; private final ClusterService clusterService; + private final IndicesService indicesService; private final Client client; private final IndexNameExpressionResolver indexNameExpressionResolver; @@ -96,6 +102,7 @@ public class EnrichPolicyRunner implements Runnable { ExecuteEnrichPolicyTask task, ActionListener<ExecuteEnrichPolicyStatus> listener, ClusterService clusterService, + IndicesService indicesService, Client client, IndexNameExpressionResolver indexNameExpressionResolver, String enrichIndexName, @@ -107,6 +114,7 @@ public class EnrichPolicyRunner implements Runnable { this.task = Objects.requireNonNull(task); this.listener = Objects.requireNonNull(listener); this.clusterService = Objects.requireNonNull(clusterService); + this.indicesService = indicesService; this.client = wrapClient(client, policyName, task, clusterService); this.indexNameExpressionResolver = Objects.requireNonNull(indexNameExpressionResolver); this.enrichIndexName = enrichIndexName; @@ -342,6 +350,7 @@ static Map<String, Object> mappingForMatchField(EnrichPolicy policy, List<Map<String, Object>> sourceMappings) { Map<String, Map<String, Object>> fieldMappings = new HashMap<>(); Map<String, Object> mappingForMatchField = mappingForMatchField(policy, sourceMappings); + MapperService mapperService = createMapperServiceForValidation(indicesService, enrichIndexName); for (String enrichField : policy.getEnrichFields()) { if (enrichField.equals(policy.getMatchField())) { mappingForMatchField = new HashMap<>(mappingForMatchField); @@ -354,7 +363,9 @@ if (typeAndFormat.format != null) { mapping.put("format", typeAndFormat.format); } - mapping.put("index", false); // disable index + if (isIndexableField(mapperService, enrichField, typeAndFormat.type, mapping)) { + mapping.put("index", false); + } fieldMappings.put(enrichField, mapping); } } @@ -397,6 +408,27 @@ private XContentBuilder createEnrichMapping(List<Map<String, Object>> sourceMapp } } + private static MapperService createMapperServiceForValidation(IndicesService indicesService, String index) { + try { + final Settings idxSettings = Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) + .put(IndexMetadata.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) + .build(); + IndexMetadata indexMetadata = IndexMetadata.builder(index).settings(idxSettings).numberOfShards(1).numberOfReplicas(0).build(); + return indicesService.createIndexMapperServiceForValidation(indexMetadata); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + + private static boolean isIndexableField(MapperService mapperService, String field, String type, Map<String, Object> properties) { + properties = new HashMap<>(properties); + properties.put("index", false); + Mapper.TypeParser parser = mapperService.getMapperRegistry().getMapperParser(type, IndexVersion.current()); + parser.parse(field, properties, mapperService.parserContext()); + return properties.containsKey("index") == false; + } + private void prepareAndCreateEnrichIndex(List<Map<String, Object>> mappings) { Settings enrichIndexSettings = Settings.builder() .put("index.number_of_shards", 1)
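/*
 * Illustrative sketch, not part of this change: Mapper.TypeParser#parse removes every parameter it
 * understands from the supplied map, so an "index" key that survives parsing means the field type
 * does not support the option. Assuming the MapperService built by createMapperServiceForValidation,
 * the probe classifies mapper types like this:
 *
 *     // "keyword" consumes "index", so the enrich mapping may safely disable indexing on it
 *     boolean keyword = isIndexableField(mapperService, "user", "keyword", Map.of("type", "keyword")); // true
 *     // "object" leaves "index" unconsumed, so the generated mapping omits "index": false for it
 *     boolean object = isIndexableField(mapperService, "name", "object", Map.of("type", "object")); // false
 */
diff --git 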
diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutorTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutorTests.java index 5213736904954..e7a022e841a85 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutorTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutorTests.java @@ -77,6 +77,7 @@ public void testNonConcurrentPolicyCoordination() throws InterruptedException { final EnrichPolicyExecutor testExecutor = new EnrichPolicyExecutor( Settings.EMPTY, null, + null, client, testThreadPool, TestIndexNameExpressionResolver.newInstance(testThreadPool.getThreadContext()), @@ -132,6 +133,7 @@ public void testMaximumPolicyExecutionLimit() throws InterruptedException { final EnrichPolicyExecutor testExecutor = new EnrichPolicyExecutor( testSettings, null, + null, client, testThreadPool, TestIndexNameExpressionResolver.newInstance(testThreadPool.getThreadContext()), @@ -266,6 +268,7 @@ protected void final EnrichPolicyExecutor testExecutor = new EnrichPolicyExecutor( Settings.EMPTY, null, + null, client, testThreadPool, TestIndexNameExpressionResolver.newInstance(testThreadPool.getThreadContext()), @@ -389,6 +392,7 @@ public void testRunPolicyLocallyMissingPolicy() { Settings.EMPTY, clusterService, null, + null, testThreadPool, TestIndexNameExpressionResolver.newInstance(testThreadPool.getThreadContext()), new EnrichPolicyLocks(), diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java index b86171a97ea3b..d8e582c9fb880 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java @@ -45,6 +45,7 @@ import org.elasticsearch.index.engine.Segment; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.indices.IndicesService; import org.elasticsearch.ingest.common.IngestCommonPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.reindex.ReindexPlugin; @@ -1871,6 +1872,7 @@ public void onFailure(Exception e) { task, wrappedListener, clusterService, + getInstanceFromNode(IndicesService.class), client(), resolver, createdEnrichIndex, @@ -2157,6 +2159,100 @@ public void testEnrichMappingConflictFormats() { assertThat(hit1, equalTo(Map.of("user", "u2", "date", "2023-05"))); } + public void testEnrichObjectField() { + createIndex("source-1", Settings.EMPTY, "_doc", "id", "type=keyword", "name.first", "type=keyword", "name.last", "type=keyword"); + client().prepareIndex("source-1") .setSource("user", "u1", "name.first", "F1", "name.last", "L1") .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); + EnrichPolicy policy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, List.of("source-1"), "user", List.of("name")); + String policyName = "test1"; + final long createTime = randomNonNegativeLong(); + String createdEnrichIndex = ".enrich-test1-" + createTime; + PlainActionFuture<ExecuteEnrichPolicyStatus> future = new PlainActionFuture<>(); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, future, createdEnrichIndex); + enrichPolicyRunner.run(); + future.actionGet(); + + // Validate Index definition + GetIndexResponse enrichIndex = 
getGetIndexResponseAndCheck(createdEnrichIndex); + Map<String, Object> mapping = enrichIndex.getMappings().get(createdEnrichIndex).sourceAsMap(); + assertEnrichMapping(mapping, """ + { + "user": { + "type": "keyword", + "doc_values": false + }, + "name": { + "type": "object" + } + } + """); + SearchResponse searchResponse = client().search(new SearchRequest(".enrich-test1")).actionGet(); + ElasticsearchAssertions.assertHitCount(searchResponse, 1L); + Map<String, Object> hit0 = searchResponse.getHits().getAt(0).getSourceAsMap(); + assertThat(hit0, equalTo(Map.of("user", "u1", "name.first", "F1", "name.last", "L1"))); + } + + public void testEnrichNestedField() throws Exception { + final String sourceIndex = "source-index"; + XContentBuilder mappingBuilder = JsonXContent.contentBuilder(); + mappingBuilder.startObject() .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject("user") .field("type", "keyword") .endObject() .startObject("nesting") .field("type", "nested") .startObject("properties") .startObject("key") .field("type", "keyword") .endObject() .endObject() .endObject() .startObject("field2") .field("type", "integer") .endObject() .endObject() .endObject() .endObject(); + CreateIndexResponse createResponse = indicesAdmin().create(new CreateIndexRequest(sourceIndex).mapping(mappingBuilder)).actionGet(); + assertTrue(createResponse.isAcknowledged()); + + String policyName = "test1"; + List<String> enrichFields = List.of("nesting", "field2"); + EnrichPolicy policy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, List.of(sourceIndex), "user", enrichFields); + + final long createTime = randomNonNegativeLong(); + String createdEnrichIndex = ".enrich-test1-" + createTime; + PlainActionFuture<ExecuteEnrichPolicyStatus> future = new PlainActionFuture<>(); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, future, createdEnrichIndex); + + logger.info("Starting policy run"); + enrichPolicyRunner.run(); + future.actionGet(); + + // Validate Index definition + GetIndexResponse enrichIndex = getGetIndexResponseAndCheck(createdEnrichIndex); + Map<String, Object> mapping = enrichIndex.getMappings().get(createdEnrichIndex).sourceAsMap(); + assertEnrichMapping(mapping, """ + { + "user": { + "type": "keyword", + "doc_values": false + }, + "field2": { + "type": "integer", + "index": false + }, + "nesting": { + "type": "nested" + } + } + """); + } + private EnrichPolicyRunner createPolicyRunner( String policyName, EnrichPolicy policy, @@ -2220,6 +2316,7 @@ public void onFailure(Exception e) { task, wrappedListener, clusterService, + getInstanceFromNode(IndicesService.class), client, resolver, targetIndex, diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java index cda4ac3ddff80..4bef4f69cd6d3 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java @@ -44,11 +44,10 @@ import static org.elasticsearch.compute.gen.Types.GROUPING_AGGREGATOR_FUNCTION; import static org.elasticsearch.compute.gen.Types.GROUPING_AGGREGATOR_FUNCTION_ADD_INPUT; import static org.elasticsearch.compute.gen.Types.INTERMEDIATE_STATE_DESC; +import static org.elasticsearch.compute.gen.Types.INT_BLOCK; import static org.elasticsearch.compute.gen.Types.INT_VECTOR; 
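GroupingAggregatorImplementer builds the generated aggregator classes with JavaPoet, so most of this hunk amounts to swapping the LONG_BLOCK/LONG_VECTOR TypeName constants for INT_BLOCK/INT_VECTOR in the builder calls. For readers unfamiliar with the generator, here is a hedged, self-contained sketch of how such a loop gets emitted; JavaPoet's MethodSpec API is real, while the emitted method's body and the free-standing groups/values names it references are illustrative only.

import com.squareup.javapoet.MethodSpec;

import javax.lang.model.element.Modifier;

public class EmitGroupLoop {
    public static void main(String[] args) {
        // beginControlFlow/addStatement append formatted source, the same
        // builder calls GroupingAggregatorImplementer uses for addRawInput.
        MethodSpec addRawInput = MethodSpec.methodBuilder("addRawInput")
            .addModifiers(Modifier.PRIVATE)
            .addParameter(int.class, "positionOffset")
            .beginControlFlow("for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++)")
            .addStatement("int groupId = Math.toIntExact(groups.getInt(groupPosition))")
            .addStatement("combine(state, groupId, values.getInt(groupPosition + positionOffset))")
            .endControlFlow()
            .build();
        System.out.println(addRawInput); // prints the generated method source
    }
}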
import static org.elasticsearch.compute.gen.Types.LIST_AGG_FUNC_DESC; import static org.elasticsearch.compute.gen.Types.LIST_INTEGER; -import static org.elasticsearch.compute.gen.Types.LONG_BLOCK; -import static org.elasticsearch.compute.gen.Types.LONG_VECTOR; import static org.elasticsearch.compute.gen.Types.PAGE; import static org.elasticsearch.compute.gen.Types.SEEN_GROUP_IDS; import static org.elasticsearch.compute.gen.Types.blockType; @@ -159,10 +158,10 @@ private TypeSpec type() { builder.addMethod(intermediateStateDesc()); builder.addMethod(intermediateBlockCount()); builder.addMethod(prepareProcessPage()); - builder.addMethod(addRawInputLoop(LONG_VECTOR, valueBlockType(init, combine))); - builder.addMethod(addRawInputLoop(LONG_VECTOR, valueVectorType(init, combine))); - builder.addMethod(addRawInputLoop(LONG_BLOCK, valueBlockType(init, combine))); - builder.addMethod(addRawInputLoop(LONG_BLOCK, valueVectorType(init, combine))); + builder.addMethod(addRawInputLoop(INT_VECTOR, valueBlockType(init, combine))); + builder.addMethod(addRawInputLoop(INT_VECTOR, valueVectorType(init, combine))); + builder.addMethod(addRawInputLoop(INT_BLOCK, valueBlockType(init, combine))); + builder.addMethod(addRawInputLoop(INT_BLOCK, valueVectorType(init, combine))); builder.addMethod(addIntermediateInput()); builder.addMethod(addIntermediateRowInput()); builder.addMethod(evaluateIntermediate()); @@ -281,12 +280,12 @@ private TypeSpec addInput(Consumer addBlock) { builder.addSuperinterface(GROUPING_AGGREGATOR_FUNCTION_ADD_INPUT); MethodSpec.Builder block = MethodSpec.methodBuilder("add").addAnnotation(Override.class).addModifiers(Modifier.PUBLIC); - block.addParameter(TypeName.INT, "positionOffset").addParameter(LONG_BLOCK, "groupIds"); + block.addParameter(TypeName.INT, "positionOffset").addParameter(INT_BLOCK, "groupIds"); addBlock.accept(block); builder.addMethod(block.build()); MethodSpec.Builder vector = MethodSpec.methodBuilder("add").addAnnotation(Override.class).addModifiers(Modifier.PUBLIC); - vector.addParameter(TypeName.INT, "positionOffset").addParameter(LONG_VECTOR, "groupIds"); + vector.addParameter(TypeName.INT, "positionOffset").addParameter(INT_VECTOR, "groupIds"); addBlock.accept(vector); builder.addMethod(vector.build()); @@ -295,7 +294,7 @@ private TypeSpec addInput(Consumer addBlock) { /** * Generate an {@code addRawInput} method to perform the actual aggregation. 
- * @param groupsType The type of the group key, always {@code LongBlock} or {@code LongVector} + * @param groupsType The type of the group key, always {@code IntBlock} or {@code IntVector} * @param valuesType The type of the values to consume, always a subclass of {@code Block} or a subclass of {@code Vector} */ private MethodSpec addRawInputLoop(TypeName groupsType, TypeName valuesType) { @@ -320,9 +319,9 @@ private MethodSpec addRawInputLoop(TypeName groupsType, TypeName valuesType) { builder.addStatement("int groupStart = groups.getFirstValueIndex(groupPosition)"); builder.addStatement("int groupEnd = groupStart + groups.getValueCount(groupPosition)"); builder.beginControlFlow("for (int g = groupStart; g < groupEnd; g++)"); - builder.addStatement("int groupId = Math.toIntExact(groups.getLong(g))"); + builder.addStatement("int groupId = Math.toIntExact(groups.getInt(g))"); } else { - builder.addStatement("int groupId = Math.toIntExact(groups.getLong(groupPosition))"); + builder.addStatement("int groupId = Math.toIntExact(groups.getInt(groupPosition))"); } if (valuesIsBlock) { @@ -409,7 +408,7 @@ private MethodSpec addIntermediateInput() { MethodSpec.Builder builder = MethodSpec.methodBuilder("addIntermediateInput"); builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC); builder.addParameter(TypeName.INT, "positionOffset"); - builder.addParameter(LONG_VECTOR, "groups"); + builder.addParameter(INT_VECTOR, "groups"); builder.addParameter(PAGE, "page"); builder.addStatement("state.enableGroupIdTracking(new $T.Empty())", SEEN_GROUP_IDS); @@ -439,7 +438,7 @@ private MethodSpec addIntermediateInput() { } builder.beginControlFlow("for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++)"); { - builder.addStatement("int groupId = Math.toIntExact(groups.getLong(groupPosition))"); + builder.addStatement("int groupId = Math.toIntExact(groups.getInt(groupPosition))"); if (hasPrimitiveState()) { assert intermediateState.size() == 2; assert intermediateState.get(1).name().equals("seen"); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeBytesRef.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeBytesRef.java index 13a0849504a4a..48aec38b800ce 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeBytesRef.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeBytesRef.java @@ -14,7 +14,7 @@ import org.elasticsearch.compute.aggregation.blockhash.BlockHash; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.IntBlock; import java.util.Arrays; @@ -140,11 +140,11 @@ public BytesRefBlock dedupeToBlockUsingCopyMissing() { } /** - * Dedupe values and build a {@link LongBlock} suitable for passing + * Dedupe values and build a {@link IntBlock} suitable for passing * as the grouping block to a {@link GroupingAggregatorFunction}. 
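The block-typed overload the generator emits has to cope with null positions and multi-valued positions, which is why it walks getFirstValueIndex/getValueCount ranges instead of reading one id per position. A self-contained sketch of that loop shape under stand-in types (IntGroups models just the slice of IntBlock this loop touches; combine is a placeholder for the aggregator call):

final class GroupLoopShape {
    interface IntGroups { // stand-in for the compute module's IntBlock
        int getPositionCount();
        boolean isNull(int position);
        int getFirstValueIndex(int position);
        int getValueCount(int position);
        int getInt(int valueIndex);
    }

    static void addRawInput(IntGroups groups, long[] values, int positionOffset) {
        for (int p = 0; p < groups.getPositionCount(); p++) {
            if (groups.isNull(p)) {
                continue; // no group id at this position
            }
            int start = groups.getFirstValueIndex(p);
            int end = start + groups.getValueCount(p);
            for (int g = start; g < end; g++) {
                // was groups.getLong(g) before this change
                combine(groups.getInt(g), values[p + positionOffset]);
            }
        }
    }

    static void combine(int groupId, long value) { /* aggregator-specific */ }
}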
*/ public MultivalueDedupe.HashResult hash(BytesRefHash hash) { - LongBlock.Builder builder = LongBlock.newBlockBuilder(block.getPositionCount()); + IntBlock.Builder builder = IntBlock.newBlockBuilder(block.getPositionCount()); boolean sawNull = false; for (int p = 0; p < block.getPositionCount(); p++) { int count = block.getValueCount(p); @@ -152,7 +152,7 @@ public MultivalueDedupe.HashResult hash(BytesRefHash hash) { switch (count) { case 0 -> { sawNull = true; - builder.appendLong(0); + builder.appendInt(0); } case 1 -> { BytesRef v = block.getBytesRef(first, work[0]); @@ -312,7 +312,7 @@ private void writeSortedWork(BytesRefBlock.Builder builder) { /** * Writes an already deduplicated {@link #work} to a hash. */ - private void hashUniquedWork(BytesRefHash hash, LongBlock.Builder builder) { + private void hashUniquedWork(BytesRefHash hash, IntBlock.Builder builder) { if (w == 1) { hash(builder, hash, work[0]); return; @@ -327,7 +327,7 @@ private void hashUniquedWork(BytesRefHash hash, LongBlock.Builder builder) { /** * Writes a sorted {@link #work} to a hash, skipping duplicates. */ - private void hashSortedWork(BytesRefHash hash, LongBlock.Builder builder) { + private void hashSortedWork(BytesRefHash hash, IntBlock.Builder builder) { if (w == 1) { hash(builder, hash, work[0]); return; @@ -383,7 +383,7 @@ private void fillWork(int from, int to) { } } - private void hash(LongBlock.Builder builder, BytesRefHash hash, BytesRef v) { - builder.appendLong(BlockHash.hashOrdToGroupNullReserved(hash.add(v))); + private void hash(IntBlock.Builder builder, BytesRefHash hash, BytesRef v) { + builder.appendInt(Math.toIntExact(BlockHash.hashOrdToGroupNullReserved(hash.add(v)))); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeDouble.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeDouble.java index 1f451c2cdac11..d30292f6fa32c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeDouble.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeDouble.java @@ -13,7 +13,7 @@ import org.elasticsearch.compute.aggregation.blockhash.BlockHash; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.IntBlock; import java.util.Arrays; @@ -137,11 +137,11 @@ public DoubleBlock dedupeToBlockUsingCopyMissing() { } /** - * Dedupe values and build a {@link LongBlock} suitable for passing + * Dedupe values and build a {@link IntBlock} suitable for passing * as the grouping block to a {@link GroupingAggregatorFunction}. */ public MultivalueDedupe.HashResult hash(LongHash hash) { - LongBlock.Builder builder = LongBlock.newBlockBuilder(block.getPositionCount()); + IntBlock.Builder builder = IntBlock.newBlockBuilder(block.getPositionCount()); boolean sawNull = false; for (int p = 0; p < block.getPositionCount(); p++) { int count = block.getValueCount(p); @@ -149,7 +149,7 @@ public MultivalueDedupe.HashResult hash(LongHash hash) { switch (count) { case 0 -> { sawNull = true; - builder.appendLong(0); + builder.appendInt(0); } case 1 -> { double v = block.getDouble(first); @@ -301,7 +301,7 @@ private void writeSortedWork(DoubleBlock.Builder builder) { /** * Writes an already deduplicated {@link #work} to a hash. 
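The builders above keep funneling hash ordinals through BlockHash.hashOrdToGroupNullReserved; only the final width of the group id changes. A hedged sketch of the semantics this code leans on, inferred from the call sites here rather than quoted from BlockHash: BytesRefHash.add returns the fresh ordinal for an unseen key and -1 - existingOrd for a repeat, and group 0 stays reserved for null, so every real key lands on ord + 1.

final class OrdToGroup {
    // Fold an add() result back to a plain ordinal: a negative value means
    // "already present", encoded as -1 - ord.
    static long hashOrdToGroup(long ord) {
        return ord < 0 ? -1 - ord : ord;
    }

    // Shift by one so group 0 can stand for "null value seen".
    static long hashOrdToGroupNullReserved(long ord) {
        return hashOrdToGroup(ord) + 1;
    }

    public static void main(String[] args) {
        System.out.println(hashOrdToGroupNullReserved(2));  // new key, ord 2 -> group 3
        System.out.println(hashOrdToGroupNullReserved(-3)); // repeat, ord 2  -> group 3
    }
}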
*/ - private void hashUniquedWork(LongHash hash, LongBlock.Builder builder) { + private void hashUniquedWork(LongHash hash, IntBlock.Builder builder) { if (w == 1) { hash(builder, hash, work[0]); return; @@ -316,7 +316,7 @@ private void hashUniquedWork(LongHash hash, LongBlock.Builder builder) { /** * Writes a sorted {@link #work} to a hash, skipping duplicates. */ - private void hashSortedWork(LongHash hash, LongBlock.Builder builder) { + private void hashSortedWork(LongHash hash, IntBlock.Builder builder) { if (w == 1) { hash(builder, hash, work[0]); return; @@ -361,7 +361,7 @@ private void grow(int size) { work = ArrayUtil.grow(work, size); } - private void hash(LongBlock.Builder builder, LongHash hash, double v) { - builder.appendLong(BlockHash.hashOrdToGroupNullReserved(hash.add(Double.doubleToLongBits(v)))); + private void hash(IntBlock.Builder builder, LongHash hash, double v) { + builder.appendInt(Math.toIntExact(BlockHash.hashOrdToGroupNullReserved(hash.add(Double.doubleToLongBits(v))))); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeInt.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeInt.java index e8e9f60189f15..cda9308a7e6d2 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeInt.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeInt.java @@ -13,7 +13,6 @@ import org.elasticsearch.compute.aggregation.blockhash.BlockHash; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.LongBlock; import java.util.Arrays; @@ -137,11 +136,11 @@ public IntBlock dedupeToBlockUsingCopyMissing() { } /** - * Dedupe values and build a {@link LongBlock} suitable for passing + * Dedupe values and build a {@link IntBlock} suitable for passing * as the grouping block to a {@link GroupingAggregatorFunction}. */ public MultivalueDedupe.HashResult hash(LongHash hash) { - LongBlock.Builder builder = LongBlock.newBlockBuilder(block.getPositionCount()); + IntBlock.Builder builder = IntBlock.newBlockBuilder(block.getPositionCount()); boolean sawNull = false; for (int p = 0; p < block.getPositionCount(); p++) { int count = block.getValueCount(p); @@ -149,7 +148,7 @@ public MultivalueDedupe.HashResult hash(LongHash hash) { switch (count) { case 0 -> { sawNull = true; - builder.appendLong(0); + builder.appendInt(0); } case 1 -> { int v = block.getInt(first); @@ -301,7 +300,7 @@ private void writeSortedWork(IntBlock.Builder builder) { /** * Writes an already deduplicated {@link #work} to a hash. */ - private void hashUniquedWork(LongHash hash, LongBlock.Builder builder) { + private void hashUniquedWork(LongHash hash, IntBlock.Builder builder) { if (w == 1) { hash(builder, hash, work[0]); return; @@ -316,7 +315,7 @@ private void hashUniquedWork(LongHash hash, LongBlock.Builder builder) { /** * Writes a sorted {@link #work} to a hash, skipping duplicates. 
*/ - private void hashSortedWork(LongHash hash, LongBlock.Builder builder) { + private void hashSortedWork(LongHash hash, IntBlock.Builder builder) { if (w == 1) { hash(builder, hash, work[0]); return; @@ -361,7 +360,7 @@ private void grow(int size) { work = ArrayUtil.grow(work, size); } - private void hash(LongBlock.Builder builder, LongHash hash, int v) { - builder.appendLong(BlockHash.hashOrdToGroupNullReserved(hash.add(v))); + private void hash(IntBlock.Builder builder, LongHash hash, int v) { + builder.appendInt(Math.toIntExact(BlockHash.hashOrdToGroupNullReserved(hash.add(v)))); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeLong.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeLong.java index f334e1bd3f61f..0266131fba37c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeLong.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeLong.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.aggregation.blockhash.BlockHash; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; import java.util.Arrays; @@ -137,11 +138,11 @@ public LongBlock dedupeToBlockUsingCopyMissing() { } /** - * Dedupe values and build a {@link LongBlock} suitable for passing + * Dedupe values and build a {@link IntBlock} suitable for passing * as the grouping block to a {@link GroupingAggregatorFunction}. */ public MultivalueDedupe.HashResult hash(LongHash hash) { - LongBlock.Builder builder = LongBlock.newBlockBuilder(block.getPositionCount()); + IntBlock.Builder builder = IntBlock.newBlockBuilder(block.getPositionCount()); boolean sawNull = false; for (int p = 0; p < block.getPositionCount(); p++) { int count = block.getValueCount(p); @@ -149,7 +150,7 @@ public MultivalueDedupe.HashResult hash(LongHash hash) { switch (count) { case 0 -> { sawNull = true; - builder.appendLong(0); + builder.appendInt(0); } case 1 -> { long v = block.getLong(first); @@ -301,7 +302,7 @@ private void writeSortedWork(LongBlock.Builder builder) { /** * Writes an already deduplicated {@link #work} to a hash. */ - private void hashUniquedWork(LongHash hash, LongBlock.Builder builder) { + private void hashUniquedWork(LongHash hash, IntBlock.Builder builder) { if (w == 1) { hash(builder, hash, work[0]); return; @@ -316,7 +317,7 @@ private void hashUniquedWork(LongHash hash, LongBlock.Builder builder) { /** * Writes a sorted {@link #work} to a hash, skipping duplicates. 
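Each appendLong here becomes appendInt plus Math.toIntExact rather than a bare (int) cast. The difference matters: group ids index state arrays, so a value past Integer.MAX_VALUE is a bug, and the exact variant fails fast instead of wrapping silently. A plain-Java demonstration of the guard, nothing Elasticsearch-specific:

public class NarrowingGuard {
    public static void main(String[] args) {
        System.out.println(Math.toIntExact(7L)); // fits: prints 7
        try {
            Math.toIntExact(Integer.MAX_VALUE + 1L); // a (int) cast would wrap to Integer.MIN_VALUE
        } catch (ArithmeticException e) {
            System.out.println("overflow caught: " + e.getMessage());
        }
    }
}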
*/ - private void hashSortedWork(LongHash hash, LongBlock.Builder builder) { + private void hashSortedWork(LongHash hash, IntBlock.Builder builder) { if (w == 1) { hash(builder, hash, work[0]); return; @@ -361,7 +362,7 @@ private void grow(int size) { work = ArrayUtil.grow(work, size); } - private void hash(LongBlock.Builder builder, LongHash hash, long v) { - builder.appendLong(BlockHash.hashOrdToGroupNullReserved(hash.add(v))); + private void hash(IntBlock.Builder builder, LongHash hash, long v) { + builder.appendInt(Math.toIntExact(BlockHash.hashOrdToGroupNullReserved(hash.add(v)))); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java index ea850cac245c7..54b0a9ef8493c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java @@ -14,9 +14,8 @@ import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; /** @@ -63,11 +62,11 @@ public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenG state.enableGroupIdTracking(seenGroupIds); return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { } }; } @@ -79,32 +78,32 @@ public void add(int positionOffset, LongVector groupIds) { } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } }; } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } }; } - private void addRawInput(int positionOffset, LongVector groups, BooleanBlock values) { + private void addRawInput(int positionOffset, IntVector groups, BooleanBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -116,14 +115,14 @@ private void addRawInput(int positionOffset, 
LongVector groups, BooleanBlock val } } - private void addRawInput(int positionOffset, LongVector groups, BooleanVector values) { + private void addRawInput(int positionOffset, IntVector groups, BooleanVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); CountDistinctBooleanAggregator.combine(state, groupId, values.getBoolean(groupPosition + positionOffset)); } } - private void addRawInput(int positionOffset, LongBlock groups, BooleanBlock values) { + private void addRawInput(int positionOffset, IntBlock groups, BooleanBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -131,7 +130,7 @@ private void addRawInput(int positionOffset, LongBlock groups, BooleanBlock valu int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -144,7 +143,7 @@ private void addRawInput(int positionOffset, LongBlock groups, BooleanBlock valu } } - private void addRawInput(int positionOffset, LongBlock groups, BooleanVector values) { + private void addRawInput(int positionOffset, IntBlock groups, BooleanVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -152,21 +151,21 @@ private void addRawInput(int positionOffset, LongBlock groups, BooleanVector val int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); CountDistinctBooleanAggregator.combine(state, groupId, values.getBoolean(groupPosition + positionOffset)); } } } @Override - public void addIntermediateInput(int positionOffset, LongVector groups, Page page) { + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); BooleanVector fbit = page.getBlock(channels.get(0)).asVector(); BooleanVector tbit = page.getBlock(channels.get(1)).asVector(); assert fbit.getPositionCount() == tbit.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); CountDistinctBooleanAggregator.combineIntermediate(state, groupId, fbit.getBoolean(groupPosition + positionOffset), tbit.getBoolean(groupPosition + positionOffset)); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java index f1de6efc814b9..8c6fd4a28dba3 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java +++ 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java @@ -15,9 +15,8 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; /** @@ -66,11 +65,11 @@ public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenG state.enableGroupIdTracking(seenGroupIds); return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { } }; } @@ -82,33 +81,33 @@ public void add(int positionOffset, LongVector groupIds) { } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } }; } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } }; } - private void addRawInput(int positionOffset, LongVector groups, BytesRefBlock values) { + private void addRawInput(int positionOffset, IntVector groups, BytesRefBlock values) { BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -120,15 +119,15 @@ private void addRawInput(int positionOffset, LongVector groups, BytesRefBlock va } } - private void addRawInput(int positionOffset, LongVector groups, BytesRefVector values) { + private void addRawInput(int positionOffset, IntVector groups, BytesRefVector values) { BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); CountDistinctBytesRefAggregator.combine(state, groupId, values.getBytesRef(groupPosition + positionOffset, scratch)); } } - private void addRawInput(int positionOffset, LongBlock groups, BytesRefBlock values) { + private void addRawInput(int positionOffset, IntBlock groups, BytesRefBlock values) { BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { @@ -137,7 +136,7 @@ private void addRawInput(int positionOffset, LongBlock groups, BytesRefBlock val 
int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -150,7 +149,7 @@ private void addRawInput(int positionOffset, LongBlock groups, BytesRefBlock val } } - private void addRawInput(int positionOffset, LongBlock groups, BytesRefVector values) { + private void addRawInput(int positionOffset, IntBlock groups, BytesRefVector values) { BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { @@ -159,20 +158,20 @@ private void addRawInput(int positionOffset, LongBlock groups, BytesRefVector va int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); CountDistinctBytesRefAggregator.combine(state, groupId, values.getBytesRef(groupPosition + positionOffset, scratch)); } } } @Override - public void addIntermediateInput(int positionOffset, LongVector groups, Page page) { + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); CountDistinctBytesRefAggregator.combineIntermediate(state, groupId, hll.getBytesRef(groupPosition + positionOffset, scratch)); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java index 6691d29ae712b..868f4410c95fe 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java @@ -17,9 +17,8 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; /** @@ -68,11 +67,11 @@ public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenG state.enableGroupIdTracking(seenGroupIds); return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { } }; } @@ -84,32 +83,32 @@ public void add(int 
positionOffset, LongVector groupIds) { } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } }; } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } }; } - private void addRawInput(int positionOffset, LongVector groups, DoubleBlock values) { + private void addRawInput(int positionOffset, IntVector groups, DoubleBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -121,14 +120,14 @@ private void addRawInput(int positionOffset, LongVector groups, DoubleBlock valu } } - private void addRawInput(int positionOffset, LongVector groups, DoubleVector values) { + private void addRawInput(int positionOffset, IntVector groups, DoubleVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); CountDistinctDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); } } - private void addRawInput(int positionOffset, LongBlock groups, DoubleBlock values) { + private void addRawInput(int positionOffset, IntBlock groups, DoubleBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -136,7 +135,7 @@ private void addRawInput(int positionOffset, LongBlock groups, DoubleBlock value int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -149,7 +148,7 @@ private void addRawInput(int positionOffset, LongBlock groups, DoubleBlock value } } - private void addRawInput(int positionOffset, LongBlock groups, DoubleVector values) { + private void addRawInput(int positionOffset, IntBlock groups, DoubleVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -157,20 +156,20 @@ private void addRawInput(int positionOffset, LongBlock groups, DoubleVector valu int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); CountDistinctDoubleAggregator.combine(state, groupId, 
values.getDouble(groupPosition + positionOffset)); } } } @Override - public void addIntermediateInput(int positionOffset, LongVector groups, Page page) { + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); CountDistinctDoubleAggregator.combineIntermediate(state, groupId, hll.getBytesRef(groupPosition + positionOffset, scratch)); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java index b0935f78d95a2..98579e6c008fb 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java @@ -17,8 +17,6 @@ import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; /** @@ -67,11 +65,11 @@ public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenG state.enableGroupIdTracking(seenGroupIds); return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { } }; } @@ -83,32 +81,32 @@ public void add(int positionOffset, LongVector groupIds) { } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } }; } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } }; } - private void addRawInput(int positionOffset, LongVector groups, IntBlock values) { + private void addRawInput(int positionOffset, IntVector groups, IntBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ 
-120,14 +118,14 @@ private void addRawInput(int positionOffset, LongVector groups, IntBlock values) } } - private void addRawInput(int positionOffset, LongVector groups, IntVector values) { + private void addRawInput(int positionOffset, IntVector groups, IntVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); CountDistinctIntAggregator.combine(state, groupId, values.getInt(groupPosition + positionOffset)); } } - private void addRawInput(int positionOffset, LongBlock groups, IntBlock values) { + private void addRawInput(int positionOffset, IntBlock groups, IntBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -135,7 +133,7 @@ private void addRawInput(int positionOffset, LongBlock groups, IntBlock values) int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -148,7 +146,7 @@ private void addRawInput(int positionOffset, LongBlock groups, IntBlock values) } } - private void addRawInput(int positionOffset, LongBlock groups, IntVector values) { + private void addRawInput(int positionOffset, IntBlock groups, IntVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -156,20 +154,20 @@ private void addRawInput(int positionOffset, LongBlock groups, IntVector values) int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); CountDistinctIntAggregator.combine(state, groupId, values.getInt(groupPosition + positionOffset)); } } } @Override - public void addIntermediateInput(int positionOffset, LongVector groups, Page page) { + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); CountDistinctIntAggregator.combineIntermediate(state, groupId, hll.getBytesRef(groupPosition + positionOffset, scratch)); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java index a9795b0388439..4af5616f7d275 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java +++ 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java @@ -15,6 +15,7 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; @@ -66,11 +67,11 @@ public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenG state.enableGroupIdTracking(seenGroupIds); return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { } }; } @@ -82,32 +83,32 @@ public void add(int positionOffset, LongVector groupIds) { } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } }; } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } }; } - private void addRawInput(int positionOffset, LongVector groups, LongBlock values) { + private void addRawInput(int positionOffset, IntVector groups, LongBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -119,14 +120,14 @@ private void addRawInput(int positionOffset, LongVector groups, LongBlock values } } - private void addRawInput(int positionOffset, LongVector groups, LongVector values) { + private void addRawInput(int positionOffset, IntVector groups, LongVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); CountDistinctLongAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); } } - private void addRawInput(int positionOffset, LongBlock groups, LongBlock values) { + private void addRawInput(int positionOffset, IntBlock groups, LongBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -134,7 +135,7 @@ private void addRawInput(int positionOffset, LongBlock groups, LongBlock values) int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = 
Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -147,7 +148,7 @@ private void addRawInput(int positionOffset, LongBlock groups, LongBlock values) } } - private void addRawInput(int positionOffset, LongBlock groups, LongVector values) { + private void addRawInput(int positionOffset, IntBlock groups, LongVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -155,20 +156,20 @@ private void addRawInput(int positionOffset, LongBlock groups, LongVector values int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); CountDistinctLongAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); } } } @Override - public void addIntermediateInput(int positionOffset, LongVector groups, Page page) { + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); CountDistinctLongAggregator.combineIntermediate(state, groupId, hll.getBytesRef(groupPosition + positionOffset, scratch)); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java index ab22615e03b76..29fe586d9c362 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java @@ -16,9 +16,8 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; /** @@ -61,11 +60,11 @@ public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenG state.enableGroupIdTracking(seenGroupIds); return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { } }; } @@ -77,32 +76,32 @@ public void add(int positionOffset, LongVector groupIds) { } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } 
@Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } }; } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } }; } - private void addRawInput(int positionOffset, LongVector groups, DoubleBlock values) { + private void addRawInput(int positionOffset, IntVector groups, DoubleBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -114,14 +113,14 @@ private void addRawInput(int positionOffset, LongVector groups, DoubleBlock valu } } - private void addRawInput(int positionOffset, LongVector groups, DoubleVector values) { + private void addRawInput(int positionOffset, IntVector groups, DoubleVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); state.set(groupId, MaxDoubleAggregator.combine(state.getOrDefault(groupId), values.getDouble(groupPosition + positionOffset))); } } - private void addRawInput(int positionOffset, LongBlock groups, DoubleBlock values) { + private void addRawInput(int positionOffset, IntBlock groups, DoubleBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -129,7 +128,7 @@ private void addRawInput(int positionOffset, LongBlock groups, DoubleBlock value int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -142,7 +141,7 @@ private void addRawInput(int positionOffset, LongBlock groups, DoubleBlock value } } - private void addRawInput(int positionOffset, LongBlock groups, DoubleVector values) { + private void addRawInput(int positionOffset, IntBlock groups, DoubleVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -150,21 +149,21 @@ private void addRawInput(int positionOffset, LongBlock groups, DoubleVector valu int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); state.set(groupId, MaxDoubleAggregator.combine(state.getOrDefault(groupId), values.getDouble(groupPosition + positionOffset))); } } } @Override - public void addIntermediateInput(int positionOffset, LongVector groups, Page page) { + public void addIntermediateInput(int positionOffset, IntVector groups, Page 
page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); DoubleVector max = page.getBlock(channels.get(0)).asVector(); BooleanVector seen = page.getBlock(channels.get(1)).asVector(); assert max.getPositionCount() == seen.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); if (seen.getBoolean(groupPosition + positionOffset)) { state.set(groupId, MaxDoubleAggregator.combine(state.getOrDefault(groupId), max.getDouble(groupPosition + positionOffset))); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java index b825912add9e0..e368789a0b6ec 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java @@ -16,8 +16,6 @@ import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; /** @@ -60,11 +58,11 @@ public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenG state.enableGroupIdTracking(seenGroupIds); return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { } }; } @@ -76,32 +74,32 @@ public void add(int positionOffset, LongVector groupIds) { } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } }; } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } }; } - private void addRawInput(int positionOffset, LongVector groups, IntBlock values) { + private void addRawInput(int positionOffset, IntVector groups, IntBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -113,14 +111,14 @@ private void addRawInput(int positionOffset, LongVector groups, IntBlock values) } } - private void addRawInput(int 
positionOffset, LongVector groups, IntVector values) { + private void addRawInput(int positionOffset, IntVector groups, IntVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); state.set(groupId, MaxIntAggregator.combine(state.getOrDefault(groupId), values.getInt(groupPosition + positionOffset))); } } - private void addRawInput(int positionOffset, LongBlock groups, IntBlock values) { + private void addRawInput(int positionOffset, IntBlock groups, IntBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -128,7 +126,7 @@ private void addRawInput(int positionOffset, LongBlock groups, IntBlock values) int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -141,7 +139,7 @@ private void addRawInput(int positionOffset, LongBlock groups, IntBlock values) } } - private void addRawInput(int positionOffset, LongBlock groups, IntVector values) { + private void addRawInput(int positionOffset, IntBlock groups, IntVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -149,21 +147,21 @@ private void addRawInput(int positionOffset, LongBlock groups, IntVector values) int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); state.set(groupId, MaxIntAggregator.combine(state.getOrDefault(groupId), values.getInt(groupPosition + positionOffset))); } } } @Override - public void addIntermediateInput(int positionOffset, LongVector groups, Page page) { + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); IntVector max = page.getBlock(channels.get(0)).asVector(); BooleanVector seen = page.getBlock(channels.get(1)).asVector(); assert max.getPositionCount() == seen.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); if (seen.getBoolean(groupPosition + positionOffset)) { state.set(groupId, MaxIntAggregator.combine(state.getOrDefault(groupId), max.getInt(groupPosition + positionOffset))); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java index 02f2352e361eb..df9e1abd2544c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java +++ 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; @@ -59,11 +60,11 @@ public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenG state.enableGroupIdTracking(seenGroupIds); return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { } }; } @@ -75,32 +76,32 @@ public void add(int positionOffset, LongVector groupIds) { } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } }; } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } }; } - private void addRawInput(int positionOffset, LongVector groups, LongBlock values) { + private void addRawInput(int positionOffset, IntVector groups, LongBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -112,14 +113,14 @@ private void addRawInput(int positionOffset, LongVector groups, LongBlock values } } - private void addRawInput(int positionOffset, LongVector groups, LongVector values) { + private void addRawInput(int positionOffset, IntVector groups, LongVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); state.set(groupId, MaxLongAggregator.combine(state.getOrDefault(groupId), values.getLong(groupPosition + positionOffset))); } } - private void addRawInput(int positionOffset, LongBlock groups, LongBlock values) { + private void addRawInput(int positionOffset, IntBlock groups, LongBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -127,7 +128,7 @@ private void addRawInput(int positionOffset, LongBlock groups, LongBlock values) int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int 
groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -140,7 +141,7 @@ private void addRawInput(int positionOffset, LongBlock groups, LongBlock values) } } - private void addRawInput(int positionOffset, LongBlock groups, LongVector values) { + private void addRawInput(int positionOffset, IntBlock groups, LongVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -148,21 +149,21 @@ private void addRawInput(int positionOffset, LongBlock groups, LongVector values int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); state.set(groupId, MaxLongAggregator.combine(state.getOrDefault(groupId), values.getLong(groupPosition + positionOffset))); } } } @Override - public void addIntermediateInput(int positionOffset, LongVector groups, Page page) { + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); LongVector max = page.getBlock(channels.get(0)).asVector(); BooleanVector seen = page.getBlock(channels.get(1)).asVector(); assert max.getPositionCount() == seen.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); if (seen.getBoolean(groupPosition + positionOffset)) { state.set(groupId, MaxLongAggregator.combine(state.getOrDefault(groupId), max.getLong(groupPosition + positionOffset))); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java index 96a8ccf0c86f2..35d23d56c54f6 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java @@ -17,9 +17,8 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; /** @@ -65,11 +64,11 @@ public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenG state.enableGroupIdTracking(seenGroupIds); return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { } }; } @@ -81,32 +80,32 @@ public void add(int positionOffset, LongVector 
groupIds) { } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } }; } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } }; } - private void addRawInput(int positionOffset, LongVector groups, DoubleBlock values) { + private void addRawInput(int positionOffset, IntVector groups, DoubleBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -118,14 +117,14 @@ private void addRawInput(int positionOffset, LongVector groups, DoubleBlock valu } } - private void addRawInput(int positionOffset, LongVector groups, DoubleVector values) { + private void addRawInput(int positionOffset, IntVector groups, DoubleVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); MedianAbsoluteDeviationDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); } } - private void addRawInput(int positionOffset, LongBlock groups, DoubleBlock values) { + private void addRawInput(int positionOffset, IntBlock groups, DoubleBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -133,7 +132,7 @@ private void addRawInput(int positionOffset, LongBlock groups, DoubleBlock value int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -146,7 +145,7 @@ private void addRawInput(int positionOffset, LongBlock groups, DoubleBlock value } } - private void addRawInput(int positionOffset, LongBlock groups, DoubleVector values) { + private void addRawInput(int positionOffset, IntBlock groups, DoubleVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -154,20 +153,20 @@ private void addRawInput(int positionOffset, LongBlock groups, DoubleVector valu int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); MedianAbsoluteDeviationDoubleAggregator.combine(state, groupId, 
values.getDouble(groupPosition + positionOffset)); } } } @Override - public void addIntermediateInput(int positionOffset, LongVector groups, Page page) { + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); MedianAbsoluteDeviationDoubleAggregator.combineIntermediate(state, groupId, quart.getBytesRef(groupPosition + positionOffset, scratch)); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java index 5cbcb3e9898ed..29eb155a57356 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java @@ -17,8 +17,6 @@ import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; /** @@ -64,11 +62,11 @@ public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenG state.enableGroupIdTracking(seenGroupIds); return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { } }; } @@ -80,32 +78,32 @@ public void add(int positionOffset, LongVector groupIds) { } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } }; } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } }; } - private void addRawInput(int positionOffset, LongVector groups, IntBlock values) { + private void addRawInput(int positionOffset, IntVector groups, IntBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); if 
(values.isNull(groupPosition + positionOffset)) { continue; } @@ -117,14 +115,14 @@ private void addRawInput(int positionOffset, LongVector groups, IntBlock values) } } - private void addRawInput(int positionOffset, LongVector groups, IntVector values) { + private void addRawInput(int positionOffset, IntVector groups, IntVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); MedianAbsoluteDeviationIntAggregator.combine(state, groupId, values.getInt(groupPosition + positionOffset)); } } - private void addRawInput(int positionOffset, LongBlock groups, IntBlock values) { + private void addRawInput(int positionOffset, IntBlock groups, IntBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -132,7 +130,7 @@ private void addRawInput(int positionOffset, LongBlock groups, IntBlock values) int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -145,7 +143,7 @@ private void addRawInput(int positionOffset, LongBlock groups, IntBlock values) } } - private void addRawInput(int positionOffset, LongBlock groups, IntVector values) { + private void addRawInput(int positionOffset, IntBlock groups, IntVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -153,20 +151,20 @@ private void addRawInput(int positionOffset, LongBlock groups, IntVector values) int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); MedianAbsoluteDeviationIntAggregator.combine(state, groupId, values.getInt(groupPosition + positionOffset)); } } } @Override - public void addIntermediateInput(int positionOffset, LongVector groups, Page page) { + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); MedianAbsoluteDeviationIntAggregator.combineIntermediate(state, groupId, quart.getBytesRef(groupPosition + positionOffset, scratch)); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java index 34efae2c90a7f..32a90d1d9412b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java +++ 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java @@ -15,6 +15,7 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; @@ -63,11 +64,11 @@ public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenG state.enableGroupIdTracking(seenGroupIds); return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { } }; } @@ -79,32 +80,32 @@ public void add(int positionOffset, LongVector groupIds) { } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } }; } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } }; } - private void addRawInput(int positionOffset, LongVector groups, LongBlock values) { + private void addRawInput(int positionOffset, IntVector groups, LongBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -116,14 +117,14 @@ private void addRawInput(int positionOffset, LongVector groups, LongBlock values } } - private void addRawInput(int positionOffset, LongVector groups, LongVector values) { + private void addRawInput(int positionOffset, IntVector groups, LongVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); MedianAbsoluteDeviationLongAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); } } - private void addRawInput(int positionOffset, LongBlock groups, LongBlock values) { + private void addRawInput(int positionOffset, IntBlock groups, LongBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -131,7 +132,7 @@ private void addRawInput(int positionOffset, LongBlock groups, LongBlock values) int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { 
- int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -144,7 +145,7 @@ private void addRawInput(int positionOffset, LongBlock groups, LongBlock values) } } - private void addRawInput(int positionOffset, LongBlock groups, LongVector values) { + private void addRawInput(int positionOffset, IntBlock groups, LongVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -152,20 +153,20 @@ private void addRawInput(int positionOffset, LongBlock groups, LongVector values int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); MedianAbsoluteDeviationLongAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); } } } @Override - public void addIntermediateInput(int positionOffset, LongVector groups, Page page) { + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); MedianAbsoluteDeviationLongAggregator.combineIntermediate(state, groupId, quart.getBytesRef(groupPosition + positionOffset, scratch)); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java index d6fbf83b2c855..6b3466223097d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java @@ -16,9 +16,8 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; /** @@ -61,11 +60,11 @@ public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenG state.enableGroupIdTracking(seenGroupIds); return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { } }; } @@ -77,32 +76,32 @@ public void add(int positionOffset, LongVector groupIds) { } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { 
addRawInput(positionOffset, groupIds, valuesBlock); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } }; } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } }; } - private void addRawInput(int positionOffset, LongVector groups, DoubleBlock values) { + private void addRawInput(int positionOffset, IntVector groups, DoubleBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -114,14 +113,14 @@ private void addRawInput(int positionOffset, LongVector groups, DoubleBlock valu } } - private void addRawInput(int positionOffset, LongVector groups, DoubleVector values) { + private void addRawInput(int positionOffset, IntVector groups, DoubleVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); state.set(groupId, MinDoubleAggregator.combine(state.getOrDefault(groupId), values.getDouble(groupPosition + positionOffset))); } } - private void addRawInput(int positionOffset, LongBlock groups, DoubleBlock values) { + private void addRawInput(int positionOffset, IntBlock groups, DoubleBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -129,7 +128,7 @@ private void addRawInput(int positionOffset, LongBlock groups, DoubleBlock value int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -142,7 +141,7 @@ private void addRawInput(int positionOffset, LongBlock groups, DoubleBlock value } } - private void addRawInput(int positionOffset, LongBlock groups, DoubleVector values) { + private void addRawInput(int positionOffset, IntBlock groups, DoubleVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -150,21 +149,21 @@ private void addRawInput(int positionOffset, LongBlock groups, DoubleVector valu int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); state.set(groupId, MinDoubleAggregator.combine(state.getOrDefault(groupId), values.getDouble(groupPosition + positionOffset))); } } } @Override - public void addIntermediateInput(int positionOffset, LongVector groups, Page page) { + public void 
addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); DoubleVector min = page.getBlock(channels.get(0)).asVector(); BooleanVector seen = page.getBlock(channels.get(1)).asVector(); assert min.getPositionCount() == seen.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); if (seen.getBoolean(groupPosition + positionOffset)) { state.set(groupId, MinDoubleAggregator.combine(state.getOrDefault(groupId), min.getDouble(groupPosition + positionOffset))); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java index b9d8c316dc561..5f6df728eeb21 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java @@ -16,8 +16,6 @@ import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; /** @@ -60,11 +58,11 @@ public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenG state.enableGroupIdTracking(seenGroupIds); return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { } }; } @@ -76,32 +74,32 @@ public void add(int positionOffset, LongVector groupIds) { } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } }; } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } }; } - private void addRawInput(int positionOffset, LongVector groups, IntBlock values) { + private void addRawInput(int positionOffset, IntVector groups, IntBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -113,14 +111,14 @@ private void addRawInput(int positionOffset, 
LongVector groups, IntBlock values) } } - private void addRawInput(int positionOffset, LongVector groups, IntVector values) { + private void addRawInput(int positionOffset, IntVector groups, IntVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); state.set(groupId, MinIntAggregator.combine(state.getOrDefault(groupId), values.getInt(groupPosition + positionOffset))); } } - private void addRawInput(int positionOffset, LongBlock groups, IntBlock values) { + private void addRawInput(int positionOffset, IntBlock groups, IntBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -128,7 +126,7 @@ private void addRawInput(int positionOffset, LongBlock groups, IntBlock values) int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -141,7 +139,7 @@ private void addRawInput(int positionOffset, LongBlock groups, IntBlock values) } } - private void addRawInput(int positionOffset, LongBlock groups, IntVector values) { + private void addRawInput(int positionOffset, IntBlock groups, IntVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -149,21 +147,21 @@ private void addRawInput(int positionOffset, LongBlock groups, IntVector values) int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); state.set(groupId, MinIntAggregator.combine(state.getOrDefault(groupId), values.getInt(groupPosition + positionOffset))); } } } @Override - public void addIntermediateInput(int positionOffset, LongVector groups, Page page) { + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); IntVector min = page.getBlock(channels.get(0)).asVector(); BooleanVector seen = page.getBlock(channels.get(1)).asVector(); assert min.getPositionCount() == seen.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); if (seen.getBoolean(groupPosition + positionOffset)) { state.set(groupId, MinIntAggregator.combine(state.getOrDefault(groupId), min.getInt(groupPosition + positionOffset))); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java index d5eca10b40286..a1d1d3035796f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java +++ 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; @@ -59,11 +60,11 @@ public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenG state.enableGroupIdTracking(seenGroupIds); return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { } }; } @@ -75,32 +76,32 @@ public void add(int positionOffset, LongVector groupIds) { } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } }; } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } }; } - private void addRawInput(int positionOffset, LongVector groups, LongBlock values) { + private void addRawInput(int positionOffset, IntVector groups, LongBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -112,14 +113,14 @@ private void addRawInput(int positionOffset, LongVector groups, LongBlock values } } - private void addRawInput(int positionOffset, LongVector groups, LongVector values) { + private void addRawInput(int positionOffset, IntVector groups, LongVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); state.set(groupId, MinLongAggregator.combine(state.getOrDefault(groupId), values.getLong(groupPosition + positionOffset))); } } - private void addRawInput(int positionOffset, LongBlock groups, LongBlock values) { + private void addRawInput(int positionOffset, IntBlock groups, LongBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -127,7 +128,7 @@ private void addRawInput(int positionOffset, LongBlock groups, LongBlock values) int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int 
groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -140,7 +141,7 @@ private void addRawInput(int positionOffset, LongBlock groups, LongBlock values) } } - private void addRawInput(int positionOffset, LongBlock groups, LongVector values) { + private void addRawInput(int positionOffset, IntBlock groups, LongVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -148,21 +149,21 @@ private void addRawInput(int positionOffset, LongBlock groups, LongVector values int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); state.set(groupId, MinLongAggregator.combine(state.getOrDefault(groupId), values.getLong(groupPosition + positionOffset))); } } } @Override - public void addIntermediateInput(int positionOffset, LongVector groups, Page page) { + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); LongVector min = page.getBlock(channels.get(0)).asVector(); BooleanVector seen = page.getBlock(channels.get(1)).asVector(); assert min.getPositionCount() == seen.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); if (seen.getBoolean(groupPosition + positionOffset)) { state.set(groupId, MinLongAggregator.combine(state.getOrDefault(groupId), min.getLong(groupPosition + positionOffset))); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java index 859b9b3434d74..ecf5ee0653cd4 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java @@ -17,9 +17,8 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; /** @@ -68,11 +67,11 @@ public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenG state.enableGroupIdTracking(seenGroupIds); return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { } }; } @@ -84,32 +83,32 @@ public void add(int positionOffset, LongVector groupIds) { } return new 
GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } }; } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } }; } - private void addRawInput(int positionOffset, LongVector groups, DoubleBlock values) { + private void addRawInput(int positionOffset, IntVector groups, DoubleBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -121,14 +120,14 @@ private void addRawInput(int positionOffset, LongVector groups, DoubleBlock valu } } - private void addRawInput(int positionOffset, LongVector groups, DoubleVector values) { + private void addRawInput(int positionOffset, IntVector groups, DoubleVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); PercentileDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); } } - private void addRawInput(int positionOffset, LongBlock groups, DoubleBlock values) { + private void addRawInput(int positionOffset, IntBlock groups, DoubleBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -136,7 +135,7 @@ private void addRawInput(int positionOffset, LongBlock groups, DoubleBlock value int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -149,7 +148,7 @@ private void addRawInput(int positionOffset, LongBlock groups, DoubleBlock value } } - private void addRawInput(int positionOffset, LongBlock groups, DoubleVector values) { + private void addRawInput(int positionOffset, IntBlock groups, DoubleVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -157,20 +156,20 @@ private void addRawInput(int positionOffset, LongBlock groups, DoubleVector valu int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); PercentileDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); } } } 
@Override - public void addIntermediateInput(int positionOffset, LongVector groups, Page page) { + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); PercentileDoubleAggregator.combineIntermediate(state, groupId, quart.getBytesRef(groupPosition + positionOffset, scratch)); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java index 2b86de9e0d12b..dc11e5b741c84 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java @@ -17,8 +17,6 @@ import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; /** @@ -67,11 +65,11 @@ public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenG state.enableGroupIdTracking(seenGroupIds); return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { } }; } @@ -83,32 +81,32 @@ public void add(int positionOffset, LongVector groupIds) { } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } }; } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } }; } - private void addRawInput(int positionOffset, LongVector groups, IntBlock values) { + private void addRawInput(int positionOffset, IntVector groups, IntBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -120,14 +118,14 @@ private void addRawInput(int positionOffset, 
LongVector groups, IntBlock values) } } - private void addRawInput(int positionOffset, LongVector groups, IntVector values) { + private void addRawInput(int positionOffset, IntVector groups, IntVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); PercentileIntAggregator.combine(state, groupId, values.getInt(groupPosition + positionOffset)); } } - private void addRawInput(int positionOffset, LongBlock groups, IntBlock values) { + private void addRawInput(int positionOffset, IntBlock groups, IntBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -135,7 +133,7 @@ private void addRawInput(int positionOffset, LongBlock groups, IntBlock values) int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -148,7 +146,7 @@ private void addRawInput(int positionOffset, LongBlock groups, IntBlock values) } } - private void addRawInput(int positionOffset, LongBlock groups, IntVector values) { + private void addRawInput(int positionOffset, IntBlock groups, IntVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -156,20 +154,20 @@ private void addRawInput(int positionOffset, LongBlock groups, IntVector values) int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); PercentileIntAggregator.combine(state, groupId, values.getInt(groupPosition + positionOffset)); } } } @Override - public void addIntermediateInput(int positionOffset, LongVector groups, Page page) { + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); PercentileIntAggregator.combineIntermediate(state, groupId, quart.getBytesRef(groupPosition + positionOffset, scratch)); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java index abb832adc4964..e172e4dea31c3 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java @@ -15,6 +15,7 @@ import org.elasticsearch.compute.data.BytesRefBlock; 
import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; @@ -66,11 +67,11 @@ public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenG state.enableGroupIdTracking(seenGroupIds); return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { } }; } @@ -82,32 +83,32 @@ public void add(int positionOffset, LongVector groupIds) { } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } }; } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } }; } - private void addRawInput(int positionOffset, LongVector groups, LongBlock values) { + private void addRawInput(int positionOffset, IntVector groups, LongBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -119,14 +120,14 @@ private void addRawInput(int positionOffset, LongVector groups, LongBlock values } } - private void addRawInput(int positionOffset, LongVector groups, LongVector values) { + private void addRawInput(int positionOffset, IntVector groups, LongVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); PercentileLongAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); } } - private void addRawInput(int positionOffset, LongBlock groups, LongBlock values) { + private void addRawInput(int positionOffset, IntBlock groups, LongBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -134,7 +135,7 @@ private void addRawInput(int positionOffset, LongBlock groups, LongBlock values) int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -147,7 +148,7 @@ private void addRawInput(int 
positionOffset, LongBlock groups, LongBlock values) } } - private void addRawInput(int positionOffset, LongBlock groups, LongVector values) { + private void addRawInput(int positionOffset, IntBlock groups, LongVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -155,20 +156,20 @@ private void addRawInput(int positionOffset, LongBlock groups, LongVector values int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); PercentileLongAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); } } } @Override - public void addIntermediateInput(int positionOffset, LongVector groups, Page page) { + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); PercentileLongAggregator.combineIntermediate(state, groupId, quart.getBytesRef(groupPosition + positionOffset, scratch)); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java index 4df1638ebb8bf..d34d71d069de1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java @@ -16,9 +16,8 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; /** @@ -66,11 +65,11 @@ public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenG state.enableGroupIdTracking(seenGroupIds); return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { } }; } @@ -82,32 +81,32 @@ public void add(int positionOffset, LongVector groupIds) { } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } }; } return new 
GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } }; } - private void addRawInput(int positionOffset, LongVector groups, DoubleBlock values) { + private void addRawInput(int positionOffset, IntVector groups, DoubleBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -119,14 +118,14 @@ private void addRawInput(int positionOffset, LongVector groups, DoubleBlock valu } } - private void addRawInput(int positionOffset, LongVector groups, DoubleVector values) { + private void addRawInput(int positionOffset, IntVector groups, DoubleVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); SumDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); } } - private void addRawInput(int positionOffset, LongBlock groups, DoubleBlock values) { + private void addRawInput(int positionOffset, IntBlock groups, DoubleBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -134,7 +133,7 @@ private void addRawInput(int positionOffset, LongBlock groups, DoubleBlock value int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -147,7 +146,7 @@ private void addRawInput(int positionOffset, LongBlock groups, DoubleBlock value } } - private void addRawInput(int positionOffset, LongBlock groups, DoubleVector values) { + private void addRawInput(int positionOffset, IntBlock groups, DoubleVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -155,14 +154,14 @@ private void addRawInput(int positionOffset, LongBlock groups, DoubleVector valu int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); SumDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); } } } @Override - public void addIntermediateInput(int positionOffset, LongVector groups, Page page) { + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); DoubleVector value = page.getBlock(channels.get(0)).asVector(); @@ -170,7 +169,7 @@ public void addIntermediateInput(int positionOffset, LongVector 
groups, Page pag BooleanVector seen = page.getBlock(channels.get(2)).asVector(); assert value.getPositionCount() == delta.getPositionCount() && value.getPositionCount() == seen.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); SumDoubleAggregator.combineIntermediate(state, groupId, value.getDouble(groupPosition + positionOffset), delta.getDouble(groupPosition + positionOffset), seen.getBoolean(groupPosition + positionOffset)); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java index 27fc33a83abd6..d008dc0a7b765 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java @@ -60,11 +60,11 @@ public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenG state.enableGroupIdTracking(seenGroupIds); return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { } }; } @@ -76,32 +76,32 @@ public void add(int positionOffset, LongVector groupIds) { } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } }; } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } }; } - private void addRawInput(int positionOffset, LongVector groups, IntBlock values) { + private void addRawInput(int positionOffset, IntVector groups, IntBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -113,14 +113,14 @@ private void addRawInput(int positionOffset, LongVector groups, IntBlock values) } } - private void addRawInput(int positionOffset, LongVector groups, IntVector values) { + private void addRawInput(int positionOffset, IntVector groups, IntVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); state.set(groupId, 
SumIntAggregator.combine(state.getOrDefault(groupId), values.getInt(groupPosition + positionOffset))); } } - private void addRawInput(int positionOffset, LongBlock groups, IntBlock values) { + private void addRawInput(int positionOffset, IntBlock groups, IntBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -128,7 +128,7 @@ private void addRawInput(int positionOffset, LongBlock groups, IntBlock values) int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -141,7 +141,7 @@ private void addRawInput(int positionOffset, LongBlock groups, IntBlock values) } } - private void addRawInput(int positionOffset, LongBlock groups, IntVector values) { + private void addRawInput(int positionOffset, IntBlock groups, IntVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -149,21 +149,21 @@ private void addRawInput(int positionOffset, LongBlock groups, IntVector values) int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); state.set(groupId, SumIntAggregator.combine(state.getOrDefault(groupId), values.getInt(groupPosition + positionOffset))); } } } @Override - public void addIntermediateInput(int positionOffset, LongVector groups, Page page) { + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); LongVector sum = page.getBlock(channels.get(0)).asVector(); BooleanVector seen = page.getBlock(channels.get(1)).asVector(); assert sum.getPositionCount() == seen.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); if (seen.getBoolean(groupPosition + positionOffset)) { state.set(groupId, SumIntAggregator.combine(state.getOrDefault(groupId), sum.getLong(groupPosition + positionOffset))); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java index 2ae2d3c2b6338..e620884398621 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongBlock; import 
org.elasticsearch.compute.data.LongVector; @@ -59,11 +60,11 @@ public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenG state.enableGroupIdTracking(seenGroupIds); return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { } }; } @@ -75,32 +76,32 @@ public void add(int positionOffset, LongVector groupIds) { } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } }; } return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesVector); } }; } - private void addRawInput(int positionOffset, LongVector groups, LongBlock values) { + private void addRawInput(int positionOffset, IntVector groups, LongBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -112,14 +113,14 @@ private void addRawInput(int positionOffset, LongVector groups, LongBlock values } } - private void addRawInput(int positionOffset, LongVector groups, LongVector values) { + private void addRawInput(int positionOffset, IntVector groups, LongVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); state.set(groupId, SumLongAggregator.combine(state.getOrDefault(groupId), values.getLong(groupPosition + positionOffset))); } } - private void addRawInput(int positionOffset, LongBlock groups, LongBlock values) { + private void addRawInput(int positionOffset, IntBlock groups, LongBlock values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -127,7 +128,7 @@ private void addRawInput(int positionOffset, LongBlock groups, LongBlock values) int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); if (values.isNull(groupPosition + positionOffset)) { continue; } @@ -140,7 +141,7 @@ private void addRawInput(int positionOffset, LongBlock groups, LongBlock values) } } - private void addRawInput(int positionOffset, LongBlock groups, LongVector values) { + private void addRawInput(int positionOffset, IntBlock groups, LongVector values) { for (int 
groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; @@ -148,21 +149,21 @@ private void addRawInput(int positionOffset, LongBlock groups, LongVector values int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); state.set(groupId, SumLongAggregator.combine(state.getOrDefault(groupId), values.getLong(groupPosition + positionOffset))); } } } @Override - public void addIntermediateInput(int positionOffset, LongVector groups, Page page) { + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); LongVector sum = page.getBlock(channels.get(0)).asVector(); BooleanVector seen = page.getBlock(channels.get(1)).asVector(); assert sum.getPositionCount() == seen.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); if (seen.getBoolean(groupPosition + positionOffset)) { state.set(groupId, SumLongAggregator.combine(state.getOrDefault(groupId), sum.getLong(groupPosition + positionOffset))); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunction.java index fc40faa0e08c4..078e0cff99daa 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunction.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; @@ -55,10 +56,10 @@ public AddInput prepareProcessPage(SeenGroupIds seenGroupIds, Page page) { state.enableGroupIdTracking(seenGroupIds); return new AddInput() { // TODO return null meaning "don't collect me" and skip those @Override - public void add(int positionOffset, LongBlock groupIds) {} + public void add(int positionOffset, IntBlock groupIds) {} @Override - public void add(int positionOffset, LongVector groupIds) {} + public void add(int positionOffset, IntVector groupIds) {} }; } Vector valuesVector = valuesBlock.asVector(); @@ -68,33 +69,33 @@ public void add(int positionOffset, LongVector groupIds) {} } return new AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } }; } return new AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, 
IntBlock groupIds) { addRawInput(groupIds); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { addRawInput(groupIds); } }; } - private void addRawInput(int positionOffset, LongVector groups, Block values) { + private void addRawInput(int positionOffset, IntVector groups, Block values) { int position = positionOffset; for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++, position++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); if (values.isNull(position)) { continue; } @@ -102,7 +103,7 @@ private void addRawInput(int positionOffset, LongVector groups, Block values) { } } - private void addRawInput(int positionOffset, LongBlock groups, Block values) { + private void addRawInput(int positionOffset, IntBlock groups, Block values) { int position = positionOffset; for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++, position++) { if (groups.isNull(groupPosition)) { @@ -111,7 +112,7 @@ private void addRawInput(int positionOffset, LongBlock groups, Block values) { int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); if (values.isNull(position)) { continue; } @@ -120,29 +121,30 @@ private void addRawInput(int positionOffset, LongBlock groups, Block values) { } } - private void addRawInput(LongVector groups) { + private void addRawInput(IntVector groups) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getLong(groupPosition)); + int groupId = Math.toIntExact(groups.getInt(groupPosition)); state.increment(groupId, 1); } } - private void addRawInput(LongBlock groups) { + private void addRawInput(IntBlock groups) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + // TODO remove the check once we don't emit null anymore if (groups.isNull(groupPosition)) { continue; } int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getLong(g)); + int groupId = Math.toIntExact(groups.getInt(g)); state.increment(groupId, 1); } } } @Override - public void addIntermediateInput(int positionOffset, LongVector groups, Page page) { + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); state.enableGroupIdTracking(new SeenGroupIds.Empty()); @@ -150,7 +152,7 @@ public void addIntermediateInput(int positionOffset, LongVector groups, Page pag BooleanVector seen = page.getBlock(channels.get(1)).asVector(); assert count.getPositionCount() == seen.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - state.increment(Math.toIntExact(groups.getLong(groupPosition)), count.getLong(groupPosition + positionOffset)); + state.increment(Math.toIntExact(groups.getInt(groupPosition)), count.getLong(groupPosition + positionOffset)); } } diff --git
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/GroupingAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/GroupingAggregator.java index e78033e08f903..5a8c97582a9b5 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/GroupingAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/GroupingAggregator.java @@ -9,9 +9,8 @@ import org.elasticsearch.compute.Describable; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.core.Releasable; @@ -42,12 +41,12 @@ public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenG if (mode.isInputPartial()) { return new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { throw new IllegalStateException("Intermediate group id must not have nulls"); } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { aggregatorFunction.addIntermediateInput(positionOffset, groupIds, page); } }; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunction.java index 017300864a6a8..0419339f1dd3e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunction.java @@ -8,9 +8,8 @@ package org.elasticsearch.compute.aggregation; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.data.Vector; import org.elasticsearch.core.Releasable; @@ -48,14 +47,14 @@ interface AddInput { * @param groupIds {@link Block} of group id, some of which may be null * or multivalued */ - void add(int positionOffset, LongBlock groupIds); + void add(int positionOffset, IntBlock groupIds); /** * Send a batch of group ids to the aggregator. The {@code groupIds} * may be offset from the start of the block to allow for sending chunks * of group ids. *

- * See {@link #add(int, LongBlock)} for discussion on the offset. This + * See {@link #add(int, IntBlock)} for discussion on the offset. This * method can only be called with blocks contained in a {@link Vector} * which only allows a single value per position. *

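// Editor's sketch (illustrative, not part of the patch): the AddInput javadoc above is
// the contract this change narrows from long to int group ids. A minimal consumer under
// that contract might look like the following. The SketchAddInput class and its
// `collected` list are hypothetical stand-ins for real aggregator state; every type and
// accessor used (IntBlock, IntVector, getPositionCount, isNull, getFirstValueIndex,
// getValueCount, getInt) appears elsewhere in this diff.
import java.util.ArrayList;
import java.util.List;

import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction;
import org.elasticsearch.compute.data.IntBlock;
import org.elasticsearch.compute.data.IntVector;

class SketchAddInput implements GroupingAggregatorFunction.AddInput {
    final List<int[]> collected = new ArrayList<>(); // hypothetical {position, groupId} pairs

    @Override
    public void add(int positionOffset, IntBlock groupIds) {
        // A block may hold nulls and multivalued positions, so walk each value range.
        for (int p = 0; p < groupIds.getPositionCount(); p++) {
            if (groupIds.isNull(p)) {
                continue;
            }
            int start = groupIds.getFirstValueIndex(p);
            int end = start + groupIds.getValueCount(p);
            for (int i = start; i < end; i++) {
                collected.add(new int[] { positionOffset + p, groupIds.getInt(i) });
            }
        }
    }

    @Override
    public void add(int positionOffset, IntVector groupIds) {
        // A vector holds exactly one value per position: no null or multivalue checks.
        for (int p = 0; p < groupIds.getPositionCount(); p++) {
            collected.add(new int[] { positionOffset + p, groupIds.getInt(p) });
        }
    }
}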
@@ -64,7 +63,7 @@ interface AddInput { * @param groupIds {@link Vector} of group ids; a {@link Vector} holds exactly one * non-null value per position */ - void add(int positionOffset, LongVector groupIds); + void add(int positionOffset, IntVector groupIds); } /** @@ -79,7 +78,7 @@ interface AddInput { /** * Add data produced by {@link #evaluateIntermediate}. */ - void addIntermediateInput(int positionOffset, LongVector groupIdVector, Page page); + void addIntermediateInput(int positionOffset, IntVector groupIdVector, Page page); /** * Add the position-th row from the intermediate output of the given aggregator function to the groupId diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java index 277a201cfb54a..1a7bad4366be9 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java @@ -12,10 +12,9 @@ import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.IntArrayVector; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongArrayVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.MultivalueDedupeBoolean; @@ -46,15 +45,15 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { } } - private LongVector add(BooleanVector vector) { - long[] groups = new long[vector.getPositionCount()]; + private IntVector add(BooleanVector vector) { + int[] groups = new int[vector.getPositionCount()]; for (int i = 0; i < vector.getPositionCount(); i++) { groups[i] = MultivalueDedupeBoolean.hashOrd(everSeen, vector.getBoolean(i)); } - return new LongArrayVector(groups, groups.length); + return new IntArrayVector(groups, groups.length); } - private LongBlock add(BooleanBlock block) { + private IntBlock add(BooleanBlock block) { return new MultivalueDedupeBoolean(block).hash(everSeen); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java index 3d5ef461c4adc..ee77e5e3c19b8 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java @@ -20,10 +20,9 @@ import org.elasticsearch.compute.data.BytesRefArrayVector; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.IntArrayVector; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongArrayVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; import
org.elasticsearch.compute.operator.MultivalueDedupe; import org.elasticsearch.compute.operator.MultivalueDedupeBytesRef; @@ -63,15 +62,15 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { } } - private LongVector add(BytesRefVector vector) { - long[] groups = new long[vector.getPositionCount()]; + private IntVector add(BytesRefVector vector) { + int[] groups = new int[vector.getPositionCount()]; for (int i = 0; i < vector.getPositionCount(); i++) { - groups[i] = hashOrdToGroupNullReserved(bytesRefHash.add(vector.getBytesRef(i, bytes))); + groups[i] = Math.toIntExact(hashOrdToGroupNullReserved(bytesRefHash.add(vector.getBytesRef(i, bytes)))); } - return new LongArrayVector(groups, vector.getPositionCount()); + return new IntArrayVector(groups, vector.getPositionCount()); } - private LongBlock add(BytesRefBlock block) { + private IntBlock add(BytesRefBlock block) { MultivalueDedupe.HashResult result = new MultivalueDedupeBytesRef(block).hash(bytesRefHash); seenNull |= result.sawNull(); return result.ords(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefLongBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefLongBlockHash.java index aa90d4be69649..50fd1bb7b0943 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefLongBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefLongBlockHash.java @@ -18,8 +18,8 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.IntArrayVector; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongArrayVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; @@ -76,15 +76,15 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { } } - public LongVector add(BytesRefVector vector1, LongVector vector2) { + public IntVector add(BytesRefVector vector1, LongVector vector2) { BytesRef scratch = new BytesRef(); int positions = vector1.getPositionCount(); - final long[] ords = new long[positions]; + final int[] ords = new int[positions]; for (int i = 0; i < positions; i++) { long hash1 = hashOrdToGroup(bytesHash.add(vector1.getBytesRef(i, scratch))); - ords[i] = hashOrdToGroup(finalHash.add(hash1, vector2.getLong(i))); + ords[i] = Math.toIntExact(hashOrdToGroup(finalHash.add(hash1, vector2.getLong(i)))); } - return new LongArrayVector(ords, positions); + return new IntArrayVector(ords, positions); } private static final long[] EMPTY = new long[0]; @@ -117,7 +117,7 @@ void add() { int count2 = block2.getValueCount(p); if (count1 == 1 && count2 == 1) { long bytesOrd = hashOrdToGroup(bytesHash.add(block1.getBytesRef(start1, scratch))); - ords.appendLong(hashOrdToGroup(finalHash.add(bytesOrd, block2.getLong(start2)))); + ords.appendInt(Math.toIntExact(hashOrdToGroup(finalHash.add(bytesOrd, block2.getLong(start2))))); addedValue(p); continue; } @@ -143,14 +143,14 @@ void add() { seenSize2 = LongLongBlockHash.add(seen2, seenSize2, block2.getLong(i)); } if (seenSize1 == 1 && seenSize2 == 1) { - ords.appendLong(hashOrdToGroup(finalHash.add(seen1[0], seen2[0]))); + 
ords.appendInt(Math.toIntExact(hashOrdToGroup(finalHash.add(seen1[0], seen2[0])))); addedValue(p); continue; } ords.beginPositionEntry(); for (int s1 = 0; s1 < seenSize1; s1++) { for (int s2 = 0; s2 < seenSize2; s2++) { - ords.appendLong(hashOrdToGroup(finalHash.add(seen1[s1], seen2[s2]))); + ords.appendInt(Math.toIntExact(hashOrdToGroup(finalHash.add(seen1[s1], seen2[s2])))); addedValueInMultivaluePosition(p); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java index 79c748e7901a5..3a52beb9c2d87 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java @@ -17,10 +17,9 @@ import org.elasticsearch.compute.data.DoubleArrayVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.IntArrayVector; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongArrayVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.MultivalueDedupe; import org.elasticsearch.compute.operator.MultivalueDedupeDouble; @@ -59,15 +58,15 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { } } - private LongVector add(DoubleVector vector) { - long[] groups = new long[vector.getPositionCount()]; + private IntVector add(DoubleVector vector) { + int[] groups = new int[vector.getPositionCount()]; for (int i = 0; i < vector.getPositionCount(); i++) { - groups[i] = hashOrdToGroupNullReserved(longHash.add(Double.doubleToLongBits(vector.getDouble(i)))); + groups[i] = Math.toIntExact(hashOrdToGroupNullReserved(longHash.add(Double.doubleToLongBits(vector.getDouble(i))))); } - return new LongArrayVector(groups, groups.length); + return new IntArrayVector(groups, groups.length); } - private LongBlock add(DoubleBlock block) { + private IntBlock add(DoubleBlock block) { MultivalueDedupe.HashResult result = new MultivalueDedupeDouble(block).hash(longHash); seenNull |= result.sawNull(); return result.ords(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java index b4e991cebbe47..4fcd9735f6158 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java @@ -17,9 +17,6 @@ import org.elasticsearch.compute.data.IntArrayVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongArrayVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.MultivalueDedupe; import org.elasticsearch.compute.operator.MultivalueDedupeInt; @@ -57,15 +54,15 @@ public void add(Page page, 
GroupingAggregatorFunction.AddInput addInput) { } } - private LongVector add(IntVector vector) { - long[] groups = new long[vector.getPositionCount()]; + private IntVector add(IntVector vector) { + int[] groups = new int[vector.getPositionCount()]; for (int i = 0; i < vector.getPositionCount(); i++) { - groups[i] = hashOrdToGroupNullReserved(longHash.add(vector.getInt(i))); + groups[i] = Math.toIntExact(hashOrdToGroupNullReserved(longHash.add(vector.getInt(i)))); } - return new LongArrayVector(groups, groups.length); + return new IntArrayVector(groups, groups.length); } - private LongBlock add(IntBlock block) { + private IntBlock add(IntBlock block) { MultivalueDedupe.HashResult result = new MultivalueDedupeInt(block).hash(longHash); seenNull |= result.sawNull(); return result.ords(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java index d5e57171e9c71..5e5b46ae6eda1 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java @@ -13,6 +13,8 @@ import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.aggregation.SeenGroupIds; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntArrayVector; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongArrayBlock; import org.elasticsearch.compute.data.LongArrayVector; @@ -56,15 +58,15 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { } } - private LongVector add(LongVector vector) { - long[] groups = new long[vector.getPositionCount()]; + private IntVector add(LongVector vector) { + int[] groups = new int[vector.getPositionCount()]; for (int i = 0; i < vector.getPositionCount(); i++) { - groups[i] = hashOrdToGroupNullReserved(longHash.add(vector.getLong(i))); + groups[i] = Math.toIntExact(hashOrdToGroupNullReserved(longHash.add(vector.getLong(i)))); } - return new LongArrayVector(groups, groups.length); + return new IntArrayVector(groups, groups.length); } - private LongBlock add(LongBlock block) { + private IntBlock add(LongBlock block) { MultivalueDedupe.HashResult result = new MultivalueDedupeLong(block).hash(longHash); seenNull |= result.sawNull(); return result.ords(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java index e20373fff0a65..9e4dbfe943114 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java @@ -14,8 +14,9 @@ import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.aggregation.SeenGroupIds; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntArrayVector; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongArrayVector; import 
org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; @@ -55,13 +56,13 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { } } - private LongVector add(LongVector vector1, LongVector vector2) { + private IntVector add(LongVector vector1, LongVector vector2) { int positions = vector1.getPositionCount(); - final long[] ords = new long[positions]; + final int[] ords = new int[positions]; for (int i = 0; i < positions; i++) { - ords[i] = hashOrdToGroup(hash.add(vector1.getLong(i), vector2.getLong(i))); + ords[i] = Math.toIntExact(hashOrdToGroup(hash.add(vector1.getLong(i), vector2.getLong(i)))); } - return new LongArrayVector(ords, positions); + return new IntArrayVector(ords, positions); } private static final long[] EMPTY = new long[0]; @@ -92,7 +93,7 @@ void add() { int count1 = block1.getValueCount(p); int count2 = block2.getValueCount(p); if (count1 == 1 && count2 == 1) { - ords.appendLong(hashOrdToGroup(hash.add(block1.getLong(start1), block2.getLong(start2)))); + ords.appendInt(Math.toIntExact(hashOrdToGroup(hash.add(block1.getLong(start1), block2.getLong(start2))))); addedValue(p); continue; } @@ -113,14 +114,14 @@ void add() { seenSize2 = LongLongBlockHash.add(seen2, seenSize2, block2.getLong(i)); } if (seenSize1 == 1 && seenSize2 == 1) { - ords.appendLong(hashOrdToGroup(hash.add(seen1[0], seen2[0]))); + ords.appendInt(Math.toIntExact(hashOrdToGroup(hash.add(seen1[0], seen2[0])))); addedValue(p); continue; } ords.beginPositionEntry(); for (int s1 = 0; s1 < seenSize1; s1++) { for (int s2 = 0; s2 < seenSize2; s2++) { - ords.appendLong(hashOrdToGroup(hash.add(seen1[s1], seen2[s2]))); + ords.appendInt(Math.toIntExact(hashOrdToGroup(hash.add(seen1[s1], seen2[s2])))); addedValueInMultivaluePosition(p); } } @@ -136,13 +137,13 @@ static class AbstractAddBlock { private int positionOffset = 0; private int added = 0; - protected LongBlock.Builder ords; + protected IntBlock.Builder ords; AbstractAddBlock(int emitBatchSize, GroupingAggregatorFunction.AddInput addInput) { this.emitBatchSize = emitBatchSize; this.addInput = addInput; - this.ords = LongBlock.newBlockBuilder(emitBatchSize); + this.ords = IntBlock.newBlockBuilder(emitBatchSize); } protected final void addedValue(int position) { @@ -166,7 +167,7 @@ protected final void emitOrds() { private void rollover(int position) { emitOrds(); positionOffset = position; - ords = LongBlock.newBlockBuilder(emitBatchSize); // TODO add a clear method to the builder? + ords = IntBlock.newBlockBuilder(emitBatchSize); // TODO add a clear method to the builder? 
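// Editor's sketch (illustrative, not part of the patch): the builder swap in rollover()
// above is the heart of AbstractAddBlock's batching. Using only names visible in this
// hunk: every ord appended to the IntBlock.Builder is followed by addedValue(position);
// once the running count reaches emitBatchSize, the accumulated ords block is flushed to
// the downstream AddInput via emitOrds() and a fresh builder is started. The effect is
// that a huge input page reaches the aggregators as bounded IntBlock chunks rather than
// one giant ords block. A subclass-side view of that cycle, with a hypothetical `group`:
//
//   ords.appendInt(group);  // buffer one group id
//   addedValue(position);   // may trigger emitOrds() and a builder reset internally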
} } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/PackedValuesBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/PackedValuesBlockHash.java index 92b9be552e86b..bd86856ffab04 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/PackedValuesBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/PackedValuesBlockHash.java @@ -85,7 +85,7 @@ class AddWork extends LongLongBlockHash.AbstractAddBlock { int position; int count; - long bufferedGroup; + int bufferedGroup; AddWork(Page page, GroupingAggregatorFunction.AddInput addInput, int batchSize) { super(emitBatchSize, addInput); @@ -124,7 +124,7 @@ void add() { switch (count) { case 0 -> throw new IllegalStateException("didn't find any values"); case 1 -> { - ords.appendLong(bufferedGroup); + ords.appendInt(bufferedGroup); addedValue(position); } default -> ords.endPositionEntry(); @@ -171,18 +171,18 @@ private void addPosition(int g) { } private void addBytes() { - long group = hashOrdToGroup(bytesRefHash.add(bytes.get())); + int group = Math.toIntExact(hashOrdToGroup(bytesRefHash.add(bytes.get()))); switch (count) { case 0 -> bufferedGroup = group; case 1 -> { ords.beginPositionEntry(); - ords.appendLong(bufferedGroup); + ords.appendInt(bufferedGroup); addedValueInMultivaluePosition(position); - ords.appendLong(group); + ords.appendInt(group); addedValueInMultivaluePosition(position); } default -> { - ords.appendLong(group); + ords.appendInt(group); addedValueInMultivaluePosition(position); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockOrdinalsReader.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockOrdinalsReader.java index d55540a37b8a6..6a2625bf53845 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockOrdinalsReader.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockOrdinalsReader.java @@ -8,8 +8,8 @@ package org.elasticsearch.compute.lucene; import org.apache.lucene.index.SortedSetDocValues; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongBlock; import java.io.IOException; @@ -22,9 +22,9 @@ public BlockOrdinalsReader(SortedSetDocValues sortedSetDocValues) { this.creationThread = Thread.currentThread(); } - public LongBlock readOrdinals(IntVector docs) throws IOException { + public IntBlock readOrdinals(IntVector docs) throws IOException { final int positionCount = docs.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount); + IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount); for (int p = 0; p < positionCount; p++) { int doc = docs.getInt(p); if (false == sortedSetDocValues.advanceExact(doc)) { @@ -32,13 +32,14 @@ public LongBlock readOrdinals(IntVector docs) throws IOException { continue; } int count = sortedSetDocValues.docValueCount(); + // TODO don't come this way if there are a zillion ords on the field if (count == 1) { - builder.appendLong(sortedSetDocValues.nextOrd()); + builder.appendInt(Math.toIntExact(sortedSetDocValues.nextOrd())); continue; } builder.beginPositionEntry(); for (int i = 0; i < count; i++) { - builder.appendLong(sortedSetDocValues.nextOrd()); + 
builder.appendInt(Math.toIntExact(sortedSetDocValues.nextOrd())); } builder.endPositionEntry(); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java index 1910cc4ec590c..5ca3c854c00b2 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java @@ -14,9 +14,8 @@ import org.elasticsearch.compute.aggregation.blockhash.BlockHash; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.core.Releasables; @@ -97,8 +96,8 @@ public void addInput(Page page) { blockHash.add(wrapPage(page), new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { - LongVector groupIdsVector = groupIds.asVector(); + public void add(int positionOffset, IntBlock groupIds) { + IntVector groupIdsVector = groupIds.asVector(); if (groupIdsVector != null) { add(positionOffset, groupIdsVector); } else { @@ -109,7 +108,7 @@ public void add(int positionOffset, LongBlock groupIds) { } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { for (GroupingAggregatorFunction.AddInput p : prepared) { p.add(positionOffset, groupIds); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupe.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupe.java index cb51dc53c1840..7cfb080dc8c3e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupe.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupe.java @@ -125,7 +125,7 @@ public Block eval(Page page) { /** * Result of calling "hash" on a multivalue dedupe. */ - public record HashResult(LongBlock ords, boolean sawNull) {} + public record HashResult(IntBlock ords, boolean sawNull) {} /** * Build a {@link BatchEncoder} which deduplicates values at each position diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupeBoolean.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupeBoolean.java index b4e7dd8914eb8..39f0bbedd6732 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupeBoolean.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupeBoolean.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; /** @@ -64,17 +65,17 @@ public BooleanBlock dedupeToBlock() { * as the grouping block to a {@link GroupingAggregatorFunction}. 
* @param everSeen array tracking if the values {@code false} and {@code true} are ever seen */ - public LongBlock hash(boolean[] everSeen) { - LongBlock.Builder builder = LongBlock.newBlockBuilder(block.getPositionCount()); + public IntBlock hash(boolean[] everSeen) { + IntBlock.Builder builder = IntBlock.newBlockBuilder(block.getPositionCount()); for (int p = 0; p < block.getPositionCount(); p++) { int count = block.getValueCount(p); int first = block.getFirstValueIndex(p); switch (count) { case 0 -> { everSeen[NULL_ORD] = true; - builder.appendLong(NULL_ORD); + builder.appendInt(NULL_ORD); } - case 1 -> builder.appendLong(hashOrd(everSeen, block.getBoolean(first))); + case 1 -> builder.appendInt(hashOrd(everSeen, block.getBoolean(first))); default -> { readValues(first, count); hashValues(everSeen, builder); @@ -156,18 +157,18 @@ private void writeValues(BooleanBlock.Builder builder) { } } - private void hashValues(boolean[] everSeen, LongBlock.Builder builder) { + private void hashValues(boolean[] everSeen, IntBlock.Builder builder) { if (seenFalse) { if (seenTrue) { builder.beginPositionEntry(); - builder.appendLong(hashOrd(everSeen, false)); - builder.appendLong(hashOrd(everSeen, true)); + builder.appendInt(hashOrd(everSeen, false)); + builder.appendInt(hashOrd(everSeen, true)); builder.endPositionEntry(); } else { - builder.appendLong(hashOrd(everSeen, false)); + builder.appendInt(hashOrd(everSeen, false)); } } else if (seenTrue) { - builder.appendLong(hashOrd(everSeen, true)); + builder.appendInt(hashOrd(everSeen, true)); } else { throw new IllegalStateException("didn't see true or false but counted values"); } @@ -185,7 +186,7 @@ private void encodeUniquedWork(BatchEncoder.Booleans encoder) { } /** * Convert the boolean to an ordinal and track if it's been seen in {@code everSeen}.
*/ - public static long hashOrd(boolean[] everSeen, boolean b) { + public static int hashOrd(boolean[] everSeen, boolean b) { if (b) { everSeen[TRUE_ORD] = true; return TRUE_ORD; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java index 3a1cf5fee3512..bc44a3a6d305c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java @@ -24,8 +24,8 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.DocBlock; import org.elasticsearch.compute.data.DocVector; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.lucene.BlockOrdinalsReader; import org.elasticsearch.compute.lucene.ValueSourceInfo; @@ -338,7 +338,7 @@ void addInput(IntVector docs, Page page) { if (BlockOrdinalsReader.canReuse(currentReader, docs.getInt(0)) == false) { currentReader = new BlockOrdinalsReader(withOrdinals.ordinalsValues(leafReaderContext)); } - final LongBlock ordinals = currentReader.readOrdinals(docs); + final IntBlock ordinals = currentReader.readOrdinals(docs); for (int p = 0; p < ordinals.getPositionCount(); p++) { if (ordinals.isNull(p)) { continue; @@ -346,7 +346,7 @@ void addInput(IntVector docs, Page page) { int start = ordinals.getFirstValueIndex(p); int end = start + ordinals.getValueCount(p); for (int i = start; i < end; i++) { - long ord = ordinals.getLong(i); + long ord = ordinals.getInt(i); visitedOrds.set(ord); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/X-MultivalueDedupe.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/X-MultivalueDedupe.java.st index 7c4fdb7bebdec..337b095ebe8d0 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/X-MultivalueDedupe.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/X-MultivalueDedupe.java.st @@ -16,16 +16,21 @@ import org.elasticsearch.common.util.LongHash; $endif$ import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.aggregation.blockhash.BlockHash; -$if(long)$ +$if(int)$ import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntBlock; + +$elseif(long)$ +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; $else$ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.$Type$Block; -import org.elasticsearch.compute.data.LongBlock; -$endif$ +import org.elasticsearch.compute.data.IntBlock; +$endif$ import java.util.Arrays; /** @@ -173,7 +178,7 @@ $endif$ } /** - * Dedupe values and build a {@link LongBlock} suitable for passing + * Dedupe values and build a {@link IntBlock} suitable for passing * as the grouping block to a {@link GroupingAggregatorFunction}. 
*/ $if(BytesRef)$ @@ -181,7 +186,7 @@ $if(BytesRef)$ $else$ public MultivalueDedupe.HashResult hash(LongHash hash) { $endif$ - LongBlock.Builder builder = LongBlock.newBlockBuilder(block.getPositionCount()); + IntBlock.Builder builder = IntBlock.newBlockBuilder(block.getPositionCount()); boolean sawNull = false; for (int p = 0; p < block.getPositionCount(); p++) { int count = block.getValueCount(p); @@ -189,7 +194,7 @@ $endif$ switch (count) { case 0 -> { sawNull = true; - builder.appendLong(0); + builder.appendInt(0); } case 1 -> { $if(BytesRef)$ @@ -389,9 +394,9 @@ $endif$ * Writes an already deduplicated {@link #work} to a hash. */ $if(BytesRef)$ - private void hashUniquedWork(BytesRefHash hash, LongBlock.Builder builder) { + private void hashUniquedWork(BytesRefHash hash, IntBlock.Builder builder) { $else$ - private void hashUniquedWork(LongHash hash, LongBlock.Builder builder) { + private void hashUniquedWork(LongHash hash, IntBlock.Builder builder) { $endif$ if (w == 1) { hash(builder, hash, work[0]); @@ -408,9 +413,9 @@ $endif$ * Writes a sorted {@link #work} to a hash, skipping duplicates. */ $if(BytesRef)$ - private void hashSortedWork(BytesRefHash hash, LongBlock.Builder builder) { + private void hashSortedWork(BytesRefHash hash, IntBlock.Builder builder) { $else$ - private void hashSortedWork(LongHash hash, LongBlock.Builder builder) { + private void hashSortedWork(LongHash hash, IntBlock.Builder builder) { $endif$ if (w == 1) { hash(builder, hash, work[0]); @@ -485,14 +490,14 @@ $if(BytesRef)$ $endif$ $if(BytesRef)$ - private void hash(LongBlock.Builder builder, BytesRefHash hash, BytesRef v) { + private void hash(IntBlock.Builder builder, BytesRefHash hash, BytesRef v) { $else$ - private void hash(LongBlock.Builder builder, LongHash hash, $type$ v) { + private void hash(IntBlock.Builder builder, LongHash hash, $type$ v) { $endif$ $if(double)$ - builder.appendLong(BlockHash.hashOrdToGroupNullReserved(hash.add(Double.doubleToLongBits(v)))); + builder.appendInt(Math.toIntExact(BlockHash.hashOrdToGroupNullReserved(hash.add(Double.doubleToLongBits(v))))); $else$ - builder.appendLong(BlockHash.hashOrdToGroupNullReserved(hash.add(v))); + builder.appendInt(Math.toIntExact(BlockHash.hashOrdToGroupNullReserved(hash.add(v)))); $endif$ } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java index ac3edc4c61a88..002790b3735d2 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java @@ -16,9 +16,9 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntArrayVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongArrayVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; @@ -473,9 +473,9 @@ public AddInput prepareProcessPage(SeenGroupIds ignoredSeenGroupIds, Page page) }, page); @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int 
positionOffset, IntBlock groupIds) { for (int offset = 0; offset < groupIds.getPositionCount(); offset += emitChunkSize) { - LongBlock.Builder builder = LongBlock.newBlockBuilder(emitChunkSize); + IntBlock.Builder builder = IntBlock.newBlockBuilder(emitChunkSize); int endP = Math.min(groupIds.getPositionCount(), offset + emitChunkSize); for (int p = offset; p < endP; p++) { int start = groupIds.getFirstValueIndex(p); @@ -483,17 +483,19 @@ public void add(int positionOffset, LongBlock groupIds) { switch (count) { case 0 -> builder.appendNull(); case 1 -> { - long group = groupIds.getLong(start); + int group = groupIds.getInt(start); seenGroupIds.set(group); - builder.appendLong(group); + builder.appendInt(group); } default -> { int end = start + count; + builder.beginPositionEntry(); for (int i = start; i < end; i++) { - long group = groupIds.getLong(i); + int group = groupIds.getInt(i); seenGroupIds.set(group); - builder.appendLong(group); + builder.appendInt(group); } + builder.endPositionEntry(); } } } @@ -502,30 +504,30 @@ public void add(int positionOffset, LongBlock groupIds) { } @Override - public void add(int positionOffset, LongVector groupIds) { - long[] chunk = new long[emitChunkSize]; + public void add(int positionOffset, IntVector groupIds) { + int[] chunk = new int[emitChunkSize]; for (int offset = 0; offset < groupIds.getPositionCount(); offset += emitChunkSize) { int count = 0; for (int i = offset; i < Math.min(groupIds.getPositionCount(), offset + emitChunkSize); i++) { - long group = groupIds.getLong(i); + int group = groupIds.getInt(i); seenGroupIds.set(group); chunk[count++] = group; } - delegateAddInput.add(positionOffset + offset, new LongArrayVector(chunk, count)); + delegateAddInput.add(positionOffset + offset, new IntArrayVector(chunk, count)); } } }; } @Override - public void addIntermediateInput(int positionOffset, LongVector groupIds, Page page) { - long[] chunk = new long[emitChunkSize]; + public void addIntermediateInput(int positionOffset, IntVector groupIds, Page page) { + int[] chunk = new int[emitChunkSize]; for (int offset = 0; offset < groupIds.getPositionCount(); offset += emitChunkSize) { int count = 0; for (int i = offset; i < Math.min(groupIds.getPositionCount(), offset + emitChunkSize); i++) { - chunk[count++] = groupIds.getLong(i); + chunk[count++] = groupIds.getInt(i); } - delegate.addIntermediateInput(positionOffset + offset, new LongArrayVector(chunk, count), page); + delegate.addIntermediateInput(positionOffset + offset, new IntArrayVector(chunk, count), page); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java index 7c56691a3ae41..e2c848f2cd7ef 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java @@ -25,7 +25,6 @@ import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongArrayVector; import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.HashAggregationOperator; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; @@ -68,11 +67,11 @@ public void testIntHash() { OrdsAndKeys ordsAndKeys = hash(block); if 
(forcePackedHash) { assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:INT], entries=3, size=")); - assertOrds(ordsAndKeys.ords, 0L, 1L, 2L, 0L, 1L, 2L, 0L, 1L, 2L); + assertOrds(ordsAndKeys.ords, 0, 1, 2, 0, 1, 2, 0, 1, 2); assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(0, 3))); } else { assertThat(ordsAndKeys.description, equalTo("IntBlockHash{channel=0, entries=3, seenNull=false}")); - assertOrds(ordsAndKeys.ords, 1L, 2L, 3L, 1L, 2L, 3L, 1L, 2L, 3L); + assertOrds(ordsAndKeys.ords, 1, 2, 3, 1, 2, 3, 1, 2, 3); assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(1, 4))); } assertKeys(ordsAndKeys.keys, 1, 2, 3); @@ -88,11 +87,11 @@ public void testIntHashWithNulls() { OrdsAndKeys ordsAndKeys = hash(builder.build()); if (forcePackedHash) { assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:INT], entries=3, size=")); - assertOrds(ordsAndKeys.ords, 0L, 1L, 2L, 1L); + assertOrds(ordsAndKeys.ords, 0, 1, 2, 1); assertKeys(ordsAndKeys.keys, 0, null, 2); } else { assertThat(ordsAndKeys.description, equalTo("IntBlockHash{channel=0, entries=2, seenNull=true}")); - assertOrds(ordsAndKeys.ords, 1L, 0L, 2L, 0L); + assertOrds(ordsAndKeys.ords, 1, 0, 2, 0); assertKeys(ordsAndKeys.keys, null, 0, 2); } assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(0, 3))); @@ -125,24 +124,24 @@ public void testIntHashWithMultiValuedFields() { assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:INT], entries=4, size=")); assertOrds( ordsAndKeys.ords, - new long[] { 0 }, - new long[] { 0, 1 }, - new long[] { 2, 0 }, - new long[] { 2 }, - new long[] { 3 }, - new long[] { 2, 1, 0 } + new int[] { 0 }, + new int[] { 0, 1 }, + new int[] { 2, 0 }, + new int[] { 2 }, + new int[] { 3 }, + new int[] { 2, 1, 0 } ); assertKeys(ordsAndKeys.keys, 1, 2, 3, null); } else { assertThat(ordsAndKeys.description, equalTo("IntBlockHash{channel=0, entries=3, seenNull=true}")); assertOrds( ordsAndKeys.ords, - new long[] { 1 }, - new long[] { 1, 2 }, - new long[] { 3, 1 }, - new long[] { 3 }, - new long[] { 0 }, - new long[] { 3, 2, 1 } + new int[] { 1 }, + new int[] { 1, 2 }, + new int[] { 3, 1 }, + new int[] { 3 }, + new int[] { 0 }, + new int[] { 3, 2, 1 } ); assertKeys(ordsAndKeys.keys, null, 1, 2, 3); } @@ -156,11 +155,11 @@ public void testLongHash() { OrdsAndKeys ordsAndKeys = hash(block); if (forcePackedHash) { assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:LONG], entries=4, size=")); - assertOrds(ordsAndKeys.ords, 0L, 1L, 2L, 0L, 2L, 1L, 3L, 2L); + assertOrds(ordsAndKeys.ords, 0, 1, 2, 0, 2, 1, 3, 2); assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(0, 4))); } else { assertThat(ordsAndKeys.description, equalTo("LongBlockHash{channel=0, entries=4, seenNull=false}")); - assertOrds(ordsAndKeys.ords, 1L, 2L, 3L, 1L, 3L, 2L, 4L, 3L); + assertOrds(ordsAndKeys.ords, 1, 2, 3, 1, 3, 2, 4, 3); assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(1, 5))); } assertKeys(ordsAndKeys.keys, 2L, 1L, 4L, 3L); @@ -176,11 +175,11 @@ public void testLongHashWithNulls() { OrdsAndKeys ordsAndKeys = hash(builder.build()); if (forcePackedHash) { assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:LONG], entries=3, size=")); - assertOrds(ordsAndKeys.ords, 0L, 1L, 2L, 1L); + assertOrds(ordsAndKeys.ords, 0, 1, 2, 1); assertKeys(ordsAndKeys.keys, 0L, null, 2L); } else { assertThat(ordsAndKeys.description, equalTo("LongBlockHash{channel=0, entries=2, seenNull=true}")); - 
assertOrds(ordsAndKeys.ords, 1L, 0L, 2L, 0L); + assertOrds(ordsAndKeys.ords, 1, 0, 2, 0); assertKeys(ordsAndKeys.keys, null, 0L, 2L); } assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(0, 3))); @@ -213,24 +212,24 @@ public void testLongHashWithMultiValuedFields() { assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:LONG], entries=4, size=")); assertOrds( ordsAndKeys.ords, - new long[] { 0 }, - new long[] { 0, 1, 2 }, - new long[] { 0 }, - new long[] { 2 }, - new long[] { 3 }, - new long[] { 2, 1, 0 } + new int[] { 0 }, + new int[] { 0, 1, 2 }, + new int[] { 0 }, + new int[] { 2 }, + new int[] { 3 }, + new int[] { 2, 1, 0 } ); assertKeys(ordsAndKeys.keys, 1L, 2L, 3L, null); } else { assertThat(ordsAndKeys.description, equalTo("LongBlockHash{channel=0, entries=3, seenNull=true}")); assertOrds( ordsAndKeys.ords, - new long[] { 1 }, - new long[] { 1, 2, 3 }, - new long[] { 1 }, - new long[] { 3 }, - new long[] { 0 }, - new long[] { 3, 2, 1 } + new int[] { 1 }, + new int[] { 1, 2, 3 }, + new int[] { 1 }, + new int[] { 3 }, + new int[] { 0 }, + new int[] { 3, 2, 1 } ); assertKeys(ordsAndKeys.keys, null, 1L, 2L, 3L); } @@ -244,11 +243,11 @@ public void testDoubleHash() { if (forcePackedHash) { assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:DOUBLE], entries=4, size=")); - assertOrds(ordsAndKeys.ords, 0L, 1L, 2L, 0L, 2L, 1L, 3L, 2L); + assertOrds(ordsAndKeys.ords, 0, 1, 2, 0, 2, 1, 3, 2); assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(0, 4))); } else { assertThat(ordsAndKeys.description, equalTo("DoubleBlockHash{channel=0, entries=4, seenNull=false}")); - assertOrds(ordsAndKeys.ords, 1L, 2L, 3L, 1L, 3L, 2L, 4L, 3L); + assertOrds(ordsAndKeys.ords, 1, 2, 3, 1, 3, 2, 4, 3); assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(1, 5))); } assertKeys(ordsAndKeys.keys, 2.0, 1.0, 4.0, 3.0); @@ -264,11 +263,11 @@ public void testDoubleHashWithNulls() { OrdsAndKeys ordsAndKeys = hash(builder.build()); if (forcePackedHash) { assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:DOUBLE], entries=3, size=")); - assertOrds(ordsAndKeys.ords, 0L, 1L, 2L, 1L); + assertOrds(ordsAndKeys.ords, 0, 1, 2, 1); assertKeys(ordsAndKeys.keys, 0.0, null, 2.0); } else { assertThat(ordsAndKeys.description, equalTo("DoubleBlockHash{channel=0, entries=2, seenNull=true}")); - assertOrds(ordsAndKeys.ords, 1L, 0L, 2L, 0L); + assertOrds(ordsAndKeys.ords, 1, 0, 2, 0); assertKeys(ordsAndKeys.keys, null, 0.0, 2.0); } assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(0, 3))); @@ -300,24 +299,24 @@ public void testDoubleHashWithMultiValuedFields() { assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:DOUBLE], entries=4, size=")); assertOrds( ordsAndKeys.ords, - new long[] { 0 }, - new long[] { 1, 2 }, - new long[] { 2, 1 }, - new long[] { 0 }, - new long[] { 3 }, - new long[] { 0, 1 } + new int[] { 0 }, + new int[] { 1, 2 }, + new int[] { 2, 1 }, + new int[] { 0 }, + new int[] { 3 }, + new int[] { 0, 1 } ); assertKeys(ordsAndKeys.keys, 1.0, 2.0, 3.0, null); } else { assertThat(ordsAndKeys.description, equalTo("DoubleBlockHash{channel=0, entries=3, seenNull=true}")); assertOrds( ordsAndKeys.ords, - new long[] { 1 }, - new long[] { 2, 3 }, - new long[] { 3, 2 }, - new long[] { 1 }, - new long[] { 0 }, - new long[] { 1, 2 } + new int[] { 1 }, + new int[] { 2, 3 }, + new int[] { 3, 2 }, + new int[] { 1 }, + new int[] { 0 }, + new int[] { 1, 2 } ); assertKeys(ordsAndKeys.keys, null, 1.0, 2.0, 3.0); } 
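
The assertions above switch the ords from long to int literals but keep the two numbering schemes intact: PackedValuesBlockHash hands out group ids starting at 0 in first-seen order, while the specialized single-column hashes (IntBlockHash, LongBlockHash, DoubleBlockHash) reserve ord 0 for null, so real keys start at 1. Below is a minimal standalone sketch of that convention using plain JDK maps rather than the production BlockHash classes; the class name and inputs are illustrative only:

```java
import java.util.HashMap;
import java.util.Map;

// Illustrative only: mirrors the group-id numbering the tests assert,
// not the production hashes. PackedValuesBlockHash style assigns ords
// from 0 in first-seen order; single-column hashes reserve ord 0 for null.
class OrdAssignmentSketch {
    private final Map<Long, Integer> ords = new HashMap<>();
    private final boolean nullReserved;

    OrdAssignmentSketch(boolean nullReserved) {
        this.nullReserved = nullReserved;
    }

    int ordFor(Long key) {
        if (key == null) {
            return 0; // the reserved null ord; only meaningful when nullReserved is true
        }
        Integer ord = ords.get(key);
        if (ord == null) {
            ord = (nullReserved ? 1 : 0) + ords.size(); // next unused ord
            ords.put(key, ord);
        }
        return ord;
    }

    public static void main(String[] args) {
        OrdAssignmentSketch packed = new OrdAssignmentSketch(false);
        OrdAssignmentSketch singleColumn = new OrdAssignmentSketch(true);
        // Same key sequence as testLongHash: 2, 1, 4, 2, 4, 1, 3, 4
        for (long v : new long[] { 2, 1, 4, 2, 4, 1, 3, 4 }) {
            System.out.print(packed.ordFor(v) + "/" + singleColumn.ordFor(v) + " ");
        }
        // prints "0/1 1/2 2/3 0/1 2/3 1/2 3/4 2/3", matching the
        // 0,1,2,0,2,1,3,2 and 1,2,3,1,3,2,4,3 assertions in testLongHash
    }
}
```
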
@@ -339,12 +338,12 @@ public void testBasicBytesRefHash() { if (forcePackedHash) { assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BYTES_REF], entries=4, size=")); assertThat(ordsAndKeys.description, endsWith("b}")); - assertOrds(ordsAndKeys.ords, 0L, 1L, 2L, 0L, 2L, 1L, 3L, 2L); + assertOrds(ordsAndKeys.ords, 0, 1, 2, 0, 2, 1, 3, 2); assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(0, 4))); } else { assertThat(ordsAndKeys.description, startsWith("BytesRefBlockHash{channel=0, entries=4, size=")); assertThat(ordsAndKeys.description, endsWith("b, seenNull=false}")); - assertOrds(ordsAndKeys.ords, 1L, 2L, 3L, 1L, 3L, 2L, 4L, 3L); + assertOrds(ordsAndKeys.ords, 1, 2, 3, 1, 3, 2, 4, 3); assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(1, 5))); } assertKeys(ordsAndKeys.keys, "item-2", "item-1", "item-4", "item-3"); @@ -361,12 +360,12 @@ public void testBytesRefHashWithNulls() { if (forcePackedHash) { assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BYTES_REF], entries=3, size=")); assertThat(ordsAndKeys.description, endsWith("b}")); - assertOrds(ordsAndKeys.ords, 0L, 1L, 2L, 1L); + assertOrds(ordsAndKeys.ords, 0, 1, 2, 1); assertKeys(ordsAndKeys.keys, "cat", null, "dog"); } else { assertThat(ordsAndKeys.description, startsWith("BytesRefBlockHash{channel=0, entries=2, size=")); assertThat(ordsAndKeys.description, endsWith("b, seenNull=true}")); - assertOrds(ordsAndKeys.ords, 1L, 0L, 2L, 0L); + assertOrds(ordsAndKeys.ords, 1, 0, 2, 0); assertKeys(ordsAndKeys.keys, null, "cat", "dog"); } assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(0, 3))); @@ -400,12 +399,12 @@ public void testBytesRefHashWithMultiValuedFields() { assertThat(ordsAndKeys.description, endsWith("b}")); assertOrds( ordsAndKeys.ords, - new long[] { 0 }, - new long[] { 0, 1 }, - new long[] { 1, 2 }, - new long[] { 2, 1 }, - new long[] { 3 }, - new long[] { 2, 1 } + new int[] { 0 }, + new int[] { 0, 1 }, + new int[] { 1, 2 }, + new int[] { 2, 1 }, + new int[] { 3 }, + new int[] { 2, 1 } ); assertKeys(ordsAndKeys.keys, "foo", "bar", "bort", null); } else { @@ -413,12 +412,12 @@ public void testBytesRefHashWithMultiValuedFields() { assertThat(ordsAndKeys.description, endsWith("b, seenNull=true}")); assertOrds( ordsAndKeys.ords, - new long[] { 1 }, - new long[] { 1, 2 }, - new long[] { 2, 3 }, - new long[] { 3, 2 }, - new long[] { 0 }, - new long[] { 3, 2 } + new int[] { 1 }, + new int[] { 1, 2 }, + new int[] { 2, 3 }, + new int[] { 3, 2 }, + new int[] { 0 }, + new int[] { 3, 2 } ); assertKeys(ordsAndKeys.keys, null, "foo", "bar", "bort"); } @@ -432,11 +431,11 @@ public void testBooleanHashFalseFirst() { OrdsAndKeys ordsAndKeys = hash(block); if (forcePackedHash) { assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BOOLEAN], entries=2, size=")); - assertOrds(ordsAndKeys.ords, 0L, 1L, 1L, 1L, 1L); + assertOrds(ordsAndKeys.ords, 0, 1, 1, 1, 1); assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(0, 2))); } else { assertThat(ordsAndKeys.description, equalTo("BooleanBlockHash{channel=0, seenFalse=true, seenTrue=true, seenNull=false}")); - assertOrds(ordsAndKeys.ords, 1L, 2L, 2L, 2L, 2L); + assertOrds(ordsAndKeys.ords, 1, 2, 2, 2, 2); assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(1, 3))); } assertKeys(ordsAndKeys.keys, false, true); @@ -449,12 +448,12 @@ public void testBooleanHashTrueFirst() { OrdsAndKeys ordsAndKeys = hash(block); if (forcePackedHash) { assertThat(ordsAndKeys.description, 
startsWith("PackedValuesBlockHash{groups=[0:BOOLEAN], entries=2, size=")); - assertOrds(ordsAndKeys.ords, 0L, 1L, 1L, 0L, 0L); + assertOrds(ordsAndKeys.ords, 0, 1, 1, 0, 0); assertKeys(ordsAndKeys.keys, true, false); assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(0, 2))); } else { assertThat(ordsAndKeys.description, equalTo("BooleanBlockHash{channel=0, seenFalse=true, seenTrue=true, seenNull=false}")); - assertOrds(ordsAndKeys.ords, 2L, 1L, 1L, 2L, 2L); + assertOrds(ordsAndKeys.ords, 2, 1, 1, 2, 2); assertKeys(ordsAndKeys.keys, false, true); assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(1, 3))); } @@ -467,12 +466,12 @@ public void testBooleanHashTrueOnly() { OrdsAndKeys ordsAndKeys = hash(block); if (forcePackedHash) { assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BOOLEAN], entries=1, size=")); - assertOrds(ordsAndKeys.ords, 0L, 0L, 0L, 0L); + assertOrds(ordsAndKeys.ords, 0, 0, 0, 0); assertKeys(ordsAndKeys.keys, true); assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.newVectorBuilder(1).appendInt(0).build())); } else { assertThat(ordsAndKeys.description, equalTo("BooleanBlockHash{channel=0, seenFalse=false, seenTrue=true, seenNull=false}")); - assertOrds(ordsAndKeys.ords, 2L, 2L, 2L, 2L); + assertOrds(ordsAndKeys.ords, 2, 2, 2, 2); assertKeys(ordsAndKeys.keys, true); assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.newVectorBuilder(1).appendInt(2).build())); } @@ -485,11 +484,11 @@ public void testBooleanHashFalseOnly() { OrdsAndKeys ordsAndKeys = hash(block); if (forcePackedHash) { assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BOOLEAN], entries=1, size=")); - assertOrds(ordsAndKeys.ords, 0L, 0L, 0L, 0L); + assertOrds(ordsAndKeys.ords, 0, 0, 0, 0); assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.newVectorBuilder(1).appendInt(0).build())); } else { assertThat(ordsAndKeys.description, equalTo("BooleanBlockHash{channel=0, seenFalse=true, seenTrue=false, seenNull=false}")); - assertOrds(ordsAndKeys.ords, 1L, 1L, 1L, 1L); + assertOrds(ordsAndKeys.ords, 1, 1, 1, 1); assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.newVectorBuilder(1).appendInt(1).build())); } assertKeys(ordsAndKeys.keys, false); @@ -505,11 +504,11 @@ public void testBooleanHashWithNulls() { OrdsAndKeys ordsAndKeys = hash(builder.build()); if (forcePackedHash) { assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BOOLEAN], entries=3, size=")); - assertOrds(ordsAndKeys.ords, 0L, 1L, 2L, 1L); + assertOrds(ordsAndKeys.ords, 0, 1, 2, 1); assertKeys(ordsAndKeys.keys, false, null, true); } else { assertThat(ordsAndKeys.description, equalTo("BooleanBlockHash{channel=0, seenFalse=true, seenTrue=true, seenNull=true}")); - assertOrds(ordsAndKeys.ords, 1L, 0L, 2L, 0L); + assertOrds(ordsAndKeys.ords, 1, 0, 2, 0); assertKeys(ordsAndKeys.keys, null, false, true); } assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(0, 3))); @@ -541,24 +540,24 @@ public void testBooleanHashWithMultiValuedFields() { assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BOOLEAN], entries=3, size=")); assertOrds( ordsAndKeys.ords, - new long[] { 0 }, - new long[] { 0, 1 }, - new long[] { 0, 1 }, // Order is not preserved - new long[] { 1 }, - new long[] { 2 }, - new long[] { 0, 1 } + new int[] { 0 }, + new int[] { 0, 1 }, + new int[] { 0, 1 }, // Order is not preserved + new int[] { 1 }, + new int[] { 2 }, + new int[] { 0, 1 } ); assertKeys(ordsAndKeys.keys, false, true, null); } else { 
assertThat(ordsAndKeys.description, equalTo("BooleanBlockHash{channel=0, seenFalse=true, seenTrue=true, seenNull=true}")); assertOrds( ordsAndKeys.ords, - new long[] { 1 }, - new long[] { 1, 2 }, - new long[] { 1, 2 }, // Order is not preserved - new long[] { 2 }, - new long[] { 0 }, - new long[] { 1, 2 } + new int[] { 1 }, + new int[] { 1, 2 }, + new int[] { 1, 2 }, // Order is not preserved + new int[] { 2 }, + new int[] { 0 }, + new int[] { 1, 2 } ); assertKeys(ordsAndKeys.keys, null, false, true); } @@ -579,7 +578,7 @@ public void testLongLongHash() { ? startsWith("PackedValuesBlockHash{groups=[0:LONG, 1:LONG], entries=4, size=") : equalTo("LongLongBlockHash{channels=[0,1], entries=4}") ); - assertOrds(ordsAndKeys.ords, 0L, 1L, 0L, 2L, 3L, 2L); + assertOrds(ordsAndKeys.ords, 0, 1, 0, 2, 3, 2); assertKeys(ordsAndKeys.keys, expectedKeys); assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(0, 4))); } @@ -627,15 +626,15 @@ public void testLongLongHashWithMultiValuedFields() { assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:LONG, 1:LONG], entries=10, size=")); assertOrds( ordsAndKeys.ords, - new long[] { 0, 1, 2, 3 }, - new long[] { 0, 2 }, - new long[] { 0, 1 }, - new long[] { 0 }, - new long[] { 4 }, - new long[] { 5 }, - new long[] { 0 }, - new long[] { 0, 1, 2, 3 }, - new long[] { 6, 0, 7, 2, 8, 9 } + new int[] { 0, 1, 2, 3 }, + new int[] { 0, 2 }, + new int[] { 0, 1 }, + new int[] { 0 }, + new int[] { 4 }, + new int[] { 5 }, + new int[] { 0 }, + new int[] { 0, 1, 2, 3 }, + new int[] { 6, 0, 7, 2, 8, 9 } ); assertKeys( ordsAndKeys.keys, @@ -656,15 +655,15 @@ public void testLongLongHashWithMultiValuedFields() { assertThat(ordsAndKeys.description, equalTo("LongLongBlockHash{channels=[0,1], entries=8}")); assertOrds( ordsAndKeys.ords, - new long[] { 0, 1, 2, 3 }, - new long[] { 0, 2 }, - new long[] { 0, 1 }, - new long[] { 0 }, + new int[] { 0, 1, 2, 3 }, + new int[] { 0, 2 }, + new int[] { 0, 1 }, + new int[] { 0 }, null, null, - new long[] { 0 }, - new long[] { 0, 1, 2, 3 }, - new long[] { 4, 0, 5, 2, 6, 7 } + new int[] { 0 }, + new int[] { 0, 1, 2, 3 }, + new int[] { 4, 0, 5, 2, 6, 7 } ); assertKeys( ordsAndKeys.keys, @@ -701,7 +700,7 @@ public void testLongLongHashHugeCombinatorialExplosion() { ? 
startsWith("PackedValuesBlockHash{groups=[0:LONG, 1:LONG], entries=" + expectedEntries[0] + ", size=") : equalTo("LongLongBlockHash{channels=[0,1], entries=" + expectedEntries[0] + "}") ); - assertOrds(ordsAndKeys.ords, LongStream.range(start, expectedEntries[0]).toArray()); + assertOrds(ordsAndKeys.ords, IntStream.range(start, expectedEntries[0]).toArray()); assertKeys( ordsAndKeys.keys, IntStream.range(0, expectedEntries[0]) @@ -724,7 +723,7 @@ public void testIntLongHash() { OrdsAndKeys ordsAndKeys = hash(block1, block2); assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:INT, 1:LONG], entries=4, size=")); assertThat(ordsAndKeys.description, endsWith("b}")); - assertOrds(ordsAndKeys.ords, 0L, 1L, 0L, 2L, 3L, 2L); + assertOrds(ordsAndKeys.ords, 0, 1, 0, 2, 3, 2); assertKeys(ordsAndKeys.keys, expectedKeys); } @@ -737,7 +736,7 @@ public void testLongDoubleHash() { OrdsAndKeys ordsAndKeys = hash(block1, block2); assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:LONG, 1:DOUBLE], entries=4, size=")); assertThat(ordsAndKeys.description, endsWith("b}")); - assertOrds(ordsAndKeys.ords, 0L, 1L, 0L, 2L, 3L, 2L); + assertOrds(ordsAndKeys.ords, 0, 1, 0, 2, 3, 2); assertKeys(ordsAndKeys.keys, expectedKeys); } @@ -755,7 +754,7 @@ public void testIntBooleanHash() { OrdsAndKeys ordsAndKeys = hash(block1, block2); assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:INT, 1:BOOLEAN], entries=4, size=")); assertThat(ordsAndKeys.description, endsWith("b}")); - assertOrds(ordsAndKeys.ords, 0L, 1L, 0L, 2L, 3L, 2L); + assertOrds(ordsAndKeys.ords, 0, 1, 0, 2, 3, 2); assertKeys(ordsAndKeys.keys, expectedKeys); } @@ -776,7 +775,7 @@ public void testLongLongHashWithNull() { OrdsAndKeys ordsAndKeys = hash(b1.build(), b2.build()); if (forcePackedHash) { assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:LONG, 1:LONG], entries=5, size=")); - assertOrds(ordsAndKeys.ords, 0L, 1L, 2L, 3L, 4L); + assertOrds(ordsAndKeys.ords, 0, 1, 2, 3, 4); assertKeys( ordsAndKeys.keys, new Object[][] { @@ -789,7 +788,7 @@ public void testLongLongHashWithNull() { assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(0, 5))); } else { assertThat(ordsAndKeys.description, equalTo("LongLongBlockHash{channels=[0,1], entries=2}")); - assertOrds(ordsAndKeys.ords, 0L, null, 1L, null, null); + assertOrds(ordsAndKeys.ords, 0, null, 1, null, null); assertKeys(ordsAndKeys.keys, new Object[][] { new Object[] { 1L, 0L }, new Object[] { 0L, 1L } }); assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(0, 2))); } @@ -822,7 +821,7 @@ public void testLongBytesRefHash() { ) ); assertThat(ordsAndKeys.description, endsWith("b}")); - assertOrds(ordsAndKeys.ords, 0L, 1L, 0L, 2L, 3L, 2L); + assertOrds(ordsAndKeys.ords, 0, 1, 0, 2, 3, 2); assertKeys(ordsAndKeys.keys, expectedKeys); assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(0, 4))); } @@ -845,7 +844,7 @@ public void testLongBytesRefHashWithNull() { if (forcePackedHash) { assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:LONG, 1:BYTES_REF], entries=5, size=")); assertThat(ordsAndKeys.description, endsWith("b}")); - assertOrds(ordsAndKeys.ords, 0L, 1L, 2L, 3L, 4L); + assertOrds(ordsAndKeys.ords, 0, 1, 2, 3, 4); assertKeys( ordsAndKeys.keys, new Object[][] { @@ -862,7 +861,7 @@ public void testLongBytesRefHashWithNull() { startsWith("BytesRefLongBlockHash{keys=[BytesRefKey[channel=1], LongKey[channel=0]], entries=2, size=") ); 
assertThat(ordsAndKeys.description, endsWith("b}")); - assertOrds(ordsAndKeys.ords, 0L, null, 1L, null, null); + assertOrds(ordsAndKeys.ords, 0, null, 1, null, null); assertKeys(ordsAndKeys.keys, new Object[][] { new Object[] { 1L, "cat" }, new Object[] { 0L, "dog" } }); assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(0, 2))); } @@ -911,15 +910,15 @@ public void testLongBytesRefHashWithMultiValuedFields() { assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:LONG, 1:BYTES_REF], entries=10, size=")); assertOrds( ordsAndKeys.ords, - new long[] { 0, 1, 2, 3 }, - new long[] { 0, 2 }, - new long[] { 0, 1 }, - new long[] { 0 }, - new long[] { 4 }, - new long[] { 5 }, - new long[] { 0 }, - new long[] { 0, 1, 2, 3 }, - new long[] { 6, 0, 7, 2, 8, 9 } + new int[] { 0, 1, 2, 3 }, + new int[] { 0, 2 }, + new int[] { 0, 1 }, + new int[] { 0 }, + new int[] { 4 }, + new int[] { 5 }, + new int[] { 0 }, + new int[] { 0, 1, 2, 3 }, + new int[] { 6, 0, 7, 2, 8, 9 } ); assertKeys( ordsAndKeys.keys, @@ -943,15 +942,15 @@ public void testLongBytesRefHashWithMultiValuedFields() { ); assertOrds( ordsAndKeys.ords, - new long[] { 0, 1, 2, 3 }, - new long[] { 0, 1 }, - new long[] { 0, 2 }, - new long[] { 0 }, null, null, - new long[] { 0 }, - new long[] { 0, 1, 2, 3 }, - new long[] { 4, 5, 6, 0, 1, 7 } + new int[] { 0, 1, 2, 3 }, + new int[] { 0, 1 }, + new int[] { 0, 2 }, + new int[] { 0 }, null, null, + new int[] { 0 }, + new int[] { 0, 1, 2, 3 }, + new int[] { 4, 5, 6, 0, 1, 7 } ); assertKeys( ordsAndKeys.keys, @@ -990,7 +989,7 @@ public void testBytesRefLongHashHugeCombinatorialExplosion() { "BytesRefLongBlockHash{keys=[BytesRefKey[channel=1], LongKey[channel=0]], entries=" + expectedEntries[0] + ", size=" ) ); - assertOrds(ordsAndKeys.ords, LongStream.range(start, expectedEntries[0]).toArray()); + assertOrds(ordsAndKeys.ords, IntStream.range(start, expectedEntries[0]).toArray()); assertKeys( ordsAndKeys.keys, IntStream.range(0, expectedEntries[0]) @@ -1007,7 +1006,7 @@ public void testBytesRefLongHashHugeCombinatorialExplosion() { assertThat("misconfigured test", expectedEntries[0], greaterThan(0)); } - record OrdsAndKeys(String description, int positionOffset, LongBlock ords, Block[] keys, IntVector nonEmpty) {} + record OrdsAndKeys(String description, int positionOffset, IntBlock ords, Block[] keys, IntVector nonEmpty) {} /** * Hash some values into a single block of group ids. If the hash produces @@ -1042,7 +1041,7 @@ private void hash(Consumer<OrdsAndKeys> callback, int emitBatchSize, Block... va static void hash(boolean collectKeys, BlockHash blockHash, Consumer<OrdsAndKeys> callback, Block... values) { blockHash.add(new Page(values), new GroupingAggregatorFunction.AddInput() { @Override - public void add(int positionOffset, LongBlock groupIds) { + public void add(int positionOffset, IntBlock groupIds) { OrdsAndKeys result = new OrdsAndKeys( blockHash.toString(), positionOffset, @@ -1051,9 +1050,9 @@ public void add(int positionOffset, LongBlock groupIds) { blockHash.nonEmpty() ); - Set<Long> allowedOrds = new HashSet<>(); + Set<Integer> allowedOrds = new HashSet<>(); for (int p = 0; p < result.nonEmpty.getPositionCount(); p++) { - allowedOrds.add(Long.valueOf(result.nonEmpty.getInt(p))); + allowedOrds.add(result.nonEmpty.getInt(p)); } for (int p = 0; p < result.ords.getPositionCount(); p++) { if (result.ords.isNull(p)) { @@ -1062,7 +1061,7 @@ public void add(int positionOffset, LongBlock groupIds) { int start = result.ords.getFirstValueIndex(p); int end = start + result.ords.getValueCount(p); for (int i = start; i < end; i++) { - long ord = result.ords.getLong(i); + int ord = result.ords.getInt(i); if (false == allowedOrds.contains(ord)) { fail("ord is not allowed " + ord); } @@ -1072,17 +1071,17 @@ public void add(int positionOffset, LongBlock groupIds) { } @Override - public void add(int positionOffset, LongVector groupIds) { + public void add(int positionOffset, IntVector groupIds) { add(positionOffset, groupIds.asBlock()); } }); } - private void assertOrds(LongBlock ordsBlock, Long... expectedOrds) { - assertOrds(ordsBlock, Arrays.stream(expectedOrds).map(l -> l == null ? null : new long[] { l }).toArray(long[][]::new)); + private void assertOrds(IntBlock ordsBlock, Integer... expectedOrds) { + assertOrds(ordsBlock, Arrays.stream(expectedOrds).map(l -> l == null ? null : new int[] { l }).toArray(int[][]::new)); } - private void assertOrds(LongBlock ordsBlock, long[]...
expectedOrds) { if (i != 0) { error.append(", "); } - error.append(ordsBlock.getLong(start + i)); + error.append(ordsBlock.getInt(start + i)); } fail(error.append("]").toString()); } continue; } assertFalse(p + ": expected not null", ordsBlock.isNull(p)); - long[] actual = new long[count]; + int[] actual = new int[count]; for (int i = 0; i < count; i++) { - actual[i] = ordsBlock.getLong(start + i); + actual[i] = ordsBlock.getInt(start + i); } assertThat("position " + p, actual, equalTo(expectedOrds[p])); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MultivalueDedupeTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MultivalueDedupeTests.java index b616b9f9bff7e..e45b93b3180dc 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MultivalueDedupeTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MultivalueDedupeTests.java @@ -252,7 +252,7 @@ private void assertBooleanHash(Set previousValues, BasicBlockTests.Rand if (previousValues.contains(true)) { everSeen[2] = true; } - LongBlock hashes = new MultivalueDedupeBoolean((BooleanBlock) b.block()).hash(everSeen); + IntBlock hashes = new MultivalueDedupeBoolean((BooleanBlock) b.block()).hash(everSeen); List hashedValues = new ArrayList<>(); if (everSeen[1]) { hashedValues.add(false); @@ -297,7 +297,7 @@ private void assertDoubleHash(Set previousValues, BasicBlockTests.Random private void assertHash( BasicBlockTests.RandomBlock b, - LongBlock hashes, + IntBlock hashes, long hashSize, Set previousValues, LongFunction lookup @@ -311,13 +311,13 @@ private void assertHash( List v = b.values().get(p); if (v == null) { assertThat(count, equalTo(1)); - assertThat(hashes.getLong(start), equalTo(0L)); + assertThat(hashes.getInt(start), equalTo(0)); return; } List actualValues = new ArrayList<>(count); int end = start + count; for (int i = start; i < end; i++) { - actualValues.add(lookup.apply(hashes.getLong(i) - 1)); + actualValues.add(lookup.apply(hashes.getInt(i) - 1)); } assertThat(actualValues, containsInAnyOrder(v.stream().collect(Collectors.toSet()).stream().sorted().toArray())); allValues.addAll(v); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec index 54a42eda23fa4..aa4bcac3fc9be 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec @@ -930,9 +930,23 @@ TAU():double // end::tau-result[] ; +ceil +// tag::ceil[] +ROW a=1.8 +| EVAL a=CEIL(a) +// end::ceil[] +; + +// tag::ceil-result[] +a:double +2 +// end::ceil-result[] +; + floor // tag::floor[] -ROW a=1.8 | EVAL a=FLOOR(a) +ROW a=1.8 +| EVAL a=FLOOR(a) // end::floor[] ; @@ -942,6 +956,42 @@ a:double // end::floor-result[] ; +ceilFloorOfInfinite +row i = 1.0/0.0 | eval c = ceil(i), f = floor(i); + +i:double | c:double | f:double +Infinity | Infinity | Infinity +; + +ceilFloorOfNegativeInfinite +row i = -1.0/0.0 | eval c = ceil(i), f = floor(i); + +i:double | c:double | f:double +-Infinity | -Infinity | -Infinity +; + + +ceilFloorOfInteger +row i = 1 | eval c = ceil(i), f = floor(i); + +i:integer | c:integer | f:integer +1 | 1 | 1 +; + +ceilFloorOfLong +row i = to_long(1000000000000) | eval c = ceil(i), f = floor(i); + +i:long | c:long | f:long +1000000000000 | 1000000000000 | 1000000000000 +; + +ceilFloorOfUnsignedLong +row i = 
to_ul(1000000000000000000) | eval c = ceil(i), f = floor(i); + +i:ul | c:ul | f:ul +1000000000000000000 | 1000000000000000000 | 1000000000000000000 +; + sqrt // tag::sqrt[] ROW d = 100.0 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec index 05f5cc96701f7..c684e93cb986f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec @@ -10,18 +10,19 @@ show functions; name:keyword | synopsis:keyword abs |abs(n) -acos |acos(arg1) -asin |asin(arg1) -atan |atan(arg1) -atan2 |atan2(arg1, arg2) +acos |acos(n) +asin |asin(n) +atan |atan(n) +atan2 |atan2(y, x) auto_bucket |auto_bucket(arg1, arg2, arg3, arg4) avg |avg(arg1) case |case(arg1, arg2...) +ceil |ceil(n) cidr_match |cidr_match(arg1, arg2...) coalesce |coalesce(arg1, arg2...) concat |concat(arg1, arg2...) -cos |cos(arg1) -cosh |cosh(arg1) +cos |cos(n) +cosh |cosh(n) count |count(arg1) count_distinct |count_distinct(arg1, arg2) date_extract |date_extract(arg1, arg2) @@ -29,14 +30,15 @@ date_format |date_format(arg1, arg2) date_parse |date_parse(arg1, arg2) date_trunc |date_trunc(arg1, arg2) e |e() -floor |floor(arg1) +floor |floor(n) greatest |greatest(first, rest...) is_finite |is_finite(arg1) is_infinite |is_infinite(arg1) is_nan |is_nan(arg1) -least |least(arg1, arg2...) +least |least(first, rest...) +left |left(arg1, arg2) length |length(arg1) -log10 |log10(arg1) +log10 |log10(n) ltrim |ltrim(arg1) max |max(arg1) median |median(arg1) @@ -56,15 +58,15 @@ pi |pi() pow |pow(base, exponent) round |round(arg1, arg2) rtrim |rtrim(arg1) -sin |sin(arg1) -sinh |sinh(arg1) +sin |sin(n) +sinh |sinh(n) split |split(arg1, arg2) -sqrt |sqrt(arg1) +sqrt |sqrt(n) starts_with |starts_with(arg1, arg2) substring |substring(arg1, arg2, arg3) sum |sum(arg1) -tan |tan(arg1) -tanh |tanh(arg1) +tan |tan(n) +tanh |tanh(n) tau |tau() to_bool |to_bool(arg1) to_boolean |to_boolean(arg1) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index 51bd57d9dc1da..e12f3190523b2 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -700,3 +700,23 @@ Gateway | instances Gateway | instances null | null ; + +left +// tag::left[] +FROM employees +| KEEP last_name +| EVAL left = LEFT(last_name, 3) +| SORT last_name ASC +| LIMIT 5 +// end::left[] +; + +// tag::left-result[] +last_name:keyword | left:keyword +Awdeh |Awd +Azuma |Azu +Baek |Bae +Bamford |Bam +Bernatsky |Ber +// end::left-result[] +; diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java new file mode 100644 index 0000000000000..a951cb8c30b0b --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java @@ -0,0 +1,64 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.EvalOperator; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Ceil}. + * This class is generated. Do not edit it. + */ +public final class CeilDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private final EvalOperator.ExpressionEvaluator val; + + public CeilDoubleEvaluator(EvalOperator.ExpressionEvaluator val) { + this.val = val; + } + + @Override + public Block eval(Page page) { + Block valUncastBlock = val.eval(page); + if (valUncastBlock.areAllValuesNull()) { + return Block.constantNullBlock(page.getPositionCount()); + } + DoubleBlock valBlock = (DoubleBlock) valUncastBlock; + DoubleVector valVector = valBlock.asVector(); + if (valVector == null) { + return eval(page.getPositionCount(), valBlock); + } + return eval(page.getPositionCount(), valVector).asBlock(); + } + + public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { + DoubleBlock.Builder result = DoubleBlock.newBlockBuilder(positionCount); + position: for (int p = 0; p < positionCount; p++) { + if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + result.appendNull(); + continue position; + } + result.appendDouble(Ceil.process(valBlock.getDouble(valBlock.getFirstValueIndex(p)))); + } + return result.build(); + } + + public DoubleVector eval(int positionCount, DoubleVector valVector) { + DoubleVector.Builder result = DoubleVector.newVectorBuilder(positionCount); + position: for (int p = 0; p < positionCount; p++) { + result.appendDouble(Ceil.process(valVector.getDouble(p))); + } + return result.build(); + } + + @Override + public String toString() { + return "CeilDoubleEvaluator[" + "val=" + val + "]"; + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java new file mode 100644 index 0000000000000..e12d988d8b5ab --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java @@ -0,0 +1,105 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Left}. 
+ * This class is generated. Do not edit it. + */ +public final class LeftEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final BytesRef out; + + private final EvalOperator.ExpressionEvaluator str; + + private final EvalOperator.ExpressionEvaluator length; + + public LeftEvaluator(Source source, BytesRef out, EvalOperator.ExpressionEvaluator str, + EvalOperator.ExpressionEvaluator length) { + this.warnings = new Warnings(source); + this.out = out; + this.str = str; + this.length = length; + } + + @Override + public Block eval(Page page) { + Block strUncastBlock = str.eval(page); + if (strUncastBlock.areAllValuesNull()) { + return Block.constantNullBlock(page.getPositionCount()); + } + BytesRefBlock strBlock = (BytesRefBlock) strUncastBlock; + Block lengthUncastBlock = length.eval(page); + if (lengthUncastBlock.areAllValuesNull()) { + return Block.constantNullBlock(page.getPositionCount()); + } + IntBlock lengthBlock = (IntBlock) lengthUncastBlock; + BytesRefVector strVector = strBlock.asVector(); + if (strVector == null) { + return eval(page.getPositionCount(), strBlock, lengthBlock); + } + IntVector lengthVector = lengthBlock.asVector(); + if (lengthVector == null) { + return eval(page.getPositionCount(), strBlock, lengthBlock); + } + return eval(page.getPositionCount(), strVector, lengthVector); + } + + public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, IntBlock lengthBlock) { + BytesRefBlock.Builder result = BytesRefBlock.newBlockBuilder(positionCount); + BytesRef strScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (strBlock.isNull(p) || strBlock.getValueCount(p) != 1) { + result.appendNull(); + continue position; + } + if (lengthBlock.isNull(p) || lengthBlock.getValueCount(p) != 1) { + result.appendNull(); + continue position; + } + try { + result.appendBytesRef(Left.process(out, strBlock.getBytesRef(strBlock.getFirstValueIndex(p), strScratch), lengthBlock.getInt(lengthBlock.getFirstValueIndex(p)))); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + + public BytesRefBlock eval(int positionCount, BytesRefVector strVector, IntVector lengthVector) { + BytesRefBlock.Builder result = BytesRefBlock.newBlockBuilder(positionCount); + BytesRef strScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBytesRef(Left.process(out, strVector.getBytesRef(p, strScratch), lengthVector.getInt(p))); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + + @Override + public String toString() { + return "LeftEvaluator[" + "out=" + out + ", str=" + str + ", length=" + length + "]"; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 1c5776a510f53..de4748537efa2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -42,6 +42,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan2; import 
org.elasticsearch.xpack.esql.expression.function.scalar.math.AutoBucket; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Ceil; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cos; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cosh; import org.elasticsearch.xpack.esql.expression.function.scalar.math.E; @@ -70,6 +71,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Concat; import org.elasticsearch.xpack.esql.expression.function.scalar.string.LTrim; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Left; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Length; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RTrim; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Split; @@ -112,6 +114,7 @@ private FunctionDefinition[][] functions() { def(Atan.class, Atan::new, "atan"), def(Atan2.class, Atan2::new, "atan2"), def(AutoBucket.class, AutoBucket::new, "auto_bucket"), + def(Ceil.class, Ceil::new, "ceil"), def(Cos.class, Cos::new, "cos"), def(Cosh.class, Cosh::new, "cosh"), def(E.class, E::new, "e"), @@ -139,6 +142,7 @@ private FunctionDefinition[][] functions() { def(LTrim.class, LTrim::new, "ltrim"), def(RTrim.class, RTrim::new, "rtrim"), def(Trim.class, Trim::new, "trim"), + def(Left.class, Left::new, "left"), def(StartsWith.class, StartsWith::new, "starts_with") }, // date new FunctionDefinition[] { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java index 4827812274a75..2871b2c54cac3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.Named; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMinBooleanEvaluator; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMinBytesRefEvaluator; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMinDoubleEvaluator; @@ -41,7 +42,7 @@ public class Least extends ScalarFunction implements EvaluatorMapper, OptionalArgument { private DataType dataType; - public Least(Source source, Expression first, List<Expression> rest) { + public Least(Source source, @Named("first") Expression first, @Named("rest") List<Expression> rest) { super(source, Stream.concat(Stream.of(first), rest.stream()).toList()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java index 50629f5ae0c32..75fb4571c9ff6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java @@ -9,6 +9,7 @@ import
org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Named; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -19,8 +20,8 @@ * Inverse cosine trigonometric function. */ public class Acos extends AbstractTrigonometricFunction { - public Acos(Source source, Expression field) { - super(source, field); + public Acos(Source source, @Named("n") Expression n) { + super(source, n); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java index 80b382c591695..cc964c92f7c61 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Named; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -19,8 +20,8 @@ * Inverse sine trigonometric function. */ public class Asin extends AbstractTrigonometricFunction { - public Asin(Source source, Expression field) { - super(source, field); + public Asin(Source source, @Named("n") Expression n) { + super(source, n); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java index f3a74a415ee00..7cacd88495764 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Named; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -19,8 +20,8 @@ * Inverse tangent trigonometric function.
*/ public class Atan extends AbstractTrigonometricFunction { - public Atan(Source source, Expression field) { - super(source, field); + public Atan(Source source, @Named("n") Expression n) { + super(source, n); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java index 6caa5d6127c2d..9c30c48b3c2ce 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.EsqlUnsupportedOperationException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.Named; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Expressions; import org.elasticsearch.xpack.ql.expression.TypeResolutions; @@ -34,7 +35,7 @@ public class Atan2 extends ScalarFunction implements EvaluatorMapper { private final Expression y; private final Expression x; - public Atan2(Source source, Expression y, Expression x) { + public Atan2(Source source, @Named("y") Expression y, @Named("x") Expression x) { super(source, List.of(y, x)); this.y = y; this.x = x; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java new file mode 100644 index 0000000000000..097a4e3086e1f --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java @@ -0,0 +1,77 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.Named; +import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; + +import java.util.List; +import java.util.function.Function; +import java.util.function.Supplier; + +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isNumeric; + +/** + * Round a number up to the nearest integer. + *
<p>
+ * Note that doubles are rounded up to the nearest valid double that is + * an integer ala {@link Math#ceil}.
+ * </p>
+ */ +public class Ceil extends UnaryScalarFunction implements EvaluatorMapper { + public Ceil(Source source, @Named("n") Expression n) { + super(source, n); + } + + @Override + public Supplier toEvaluator( + Function> toEvaluator + ) { + if (dataType().isInteger()) { + return toEvaluator.apply(field()); + } + Supplier fieldEval = toEvaluator.apply(field()); + return () -> new CeilDoubleEvaluator(fieldEval.get()); + } + + @Override + public Object fold() { + return EvaluatorMapper.super.fold(); + } + + @Override + protected TypeResolution resolveType() { + if (childrenResolved() == false) { + return new TypeResolution("Unresolved children"); + } + + return isNumeric(field, sourceText(), DEFAULT); + } + + @Override + public Expression replaceChildren(List newChildren) { + return new Ceil(source(), newChildren.get(0)); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, Ceil::new, field()); + } + + @Evaluator(extraName = "Double") + static double process(double val) { + return Math.ceil(val); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java index d9b3a592bcef1..90a54ea5de06b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Named; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -19,8 +20,8 @@ * Cosine trigonometric function. */ public class Cos extends AbstractTrigonometricFunction { - public Cos(Source source, Expression field) { - super(source, field); + public Cos(Source source, @Named("n") Expression n) { + super(source, n); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java index 826334ed98ef6..5f9e72e80d097 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Named; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -19,8 +20,8 @@ * Cosine hyperbolic function. 
*/ public class Cosh extends AbstractTrigonometricFunction { - public Cosh(Source source, Expression field) { - super(source, field); + public Cosh(Source source, @Named("n") Expression n) { + super(source, n); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java index b91aaf5d86abf..7624bf7a1db77 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.Named; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -30,8 +31,8 @@ *

*/ public class Floor extends UnaryScalarFunction implements EvaluatorMapper { - public Floor(Source source, Expression field) { - super(source, field); + public Floor(Source source, @Named("n") Expression n) { + super(source, n); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java index a955f54bba4ac..42d9cbbd41767 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.EsqlUnsupportedOperationException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.Named; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -27,8 +28,8 @@ import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isNumeric; public class Log10 extends UnaryScalarFunction implements EvaluatorMapper { - public Log10(Source source, Expression field) { - super(source, field); + public Log10(Source source, @Named("n") Expression n) { + super(source, n); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java index 1339e0a2130c8..2a5f709d178d2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Named; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -19,8 +20,8 @@ * Sine trigonometric function. */ public class Sin extends AbstractTrigonometricFunction { - public Sin(Source source, Expression field) { - super(source, field); + public Sin(Source source, @Named("n") Expression n) { + super(source, n); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java index d40d6bee299a5..4184ee1c99b83 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Named; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -19,8 +20,8 @@ * Sine hyperbolic function. 
*/ public class Sinh extends AbstractTrigonometricFunction { - public Sinh(Source source, Expression field) { - super(source, field); + public Sinh(Source source, @Named("n") Expression n) { + super(source, n); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java index 1eea640734789..2a18146a821ce 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.EsqlUnsupportedOperationException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.Named; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -27,8 +28,8 @@ import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isNumeric; public class Sqrt extends UnaryScalarFunction implements EvaluatorMapper { - public Sqrt(Source source, Expression field) { - super(source, field); + public Sqrt(Source source, @Named("n") Expression n) { + super(source, n); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java index 40d0ee5d9de64..8a5047ac5764b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Named; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -19,8 +20,8 @@ * Tangent trigonometric function. */ public class Tan extends AbstractTrigonometricFunction { - public Tan(Source source, Expression field) { - super(source, field); + public Tan(Source source, @Named("n") Expression n) { + super(source, n); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java index 31876aff69b33..6081ab4a1493f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Named; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -19,8 +20,8 @@ * Tangent hyperbolic function. 
*/ public class Tanh extends AbstractTrigonometricFunction { - public Tanh(Source source, Expression field) { - super(source, field); + public Tanh(Source source, @Named("n") Expression n) { + super(source, n); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java index 8fd6abce75455..ce28765b21b43 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java @@ -38,9 +38,11 @@ * Open Elasticsearch in IntelliJ. * *
• - * Open {@code x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java} - * and run it. IntelliJ will take a few minutes to compile everything but the test itself - * should take only a few seconds. This is a fast path to running ESQL's integration tests. + * Run the csv tests (see {@code x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java}) + * from within IntelliJ or, alternatively, via Gradle: + * {@code ./gradlew -p x-pack/plugin/esql test --tests "org.elasticsearch.xpack.esql.CsvTests"} + * IntelliJ will take a few minutes to compile everything but the test itself should take only a few seconds. + * This is a fast path to running ESQL's integration tests.
  • *
  • * Pick one of the csv-spec files in {@code x-pack/plugin/esql/qa/testFixtures/src/main/resources/} @@ -121,6 +123,15 @@ * asciidoc ceremony to make the result look right in the rendered docs. *
  • *
• + * Auto-generate a syntax diagram and a table with supported types by running + * {@code ./gradlew x-pack:plugin:esql:copyGeneratedDocs}. + * The generated files can be found at + * {@code docs/reference/esql/functions/signature/myfunction.svg} + * and at + * {@code docs/reference/esql/functions/types/myfunction.asciidoc} + * Make sure to commit them and reference them in your doc file.
  • + *
  • * Build the docs by cloning the docs repo * and running: *
    {@code
    diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java
    new file mode 100644
    index 0000000000000..9c1133769f846
    --- /dev/null
    +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java
    @@ -0,0 +1,127 @@
    +/*
    + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    + * or more contributor license agreements. Licensed under the Elastic License
    + * 2.0; you may not use this file except in compliance with the Elastic License
    + * 2.0.
    + */
    +
    +package org.elasticsearch.xpack.esql.expression.function.scalar.string;
    +
    +import org.apache.lucene.util.BytesRef;
    +import org.apache.lucene.util.UnicodeUtil;
    +import org.elasticsearch.compute.ann.Evaluator;
    +import org.elasticsearch.compute.ann.Fixed;
    +import org.elasticsearch.compute.operator.EvalOperator;
    +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper;
    +import org.elasticsearch.xpack.ql.expression.Expression;
    +import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction;
    +import org.elasticsearch.xpack.ql.expression.gen.script.ScriptTemplate;
    +import org.elasticsearch.xpack.ql.tree.NodeInfo;
    +import org.elasticsearch.xpack.ql.tree.Source;
    +import org.elasticsearch.xpack.ql.type.DataType;
    +import org.elasticsearch.xpack.ql.type.DataTypes;
    +
    +import java.util.Arrays;
    +import java.util.List;
    +import java.util.function.Function;
    +import java.util.function.Supplier;
    +
    +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST;
    +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND;
    +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isInteger;
    +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString;
    +
    +/**
    + * left(foo, len) is an alias for substring(foo, 0, len)
    + */
    +public class Left extends ScalarFunction implements EvaluatorMapper {
    +
    +    private final Source source;
    +
    +    private final Expression str;
    +
    +    private final Expression length;
    +
    +    public Left(Source source, Expression str, Expression length) {
    +        super(source, Arrays.asList(str, length));
    +        this.source = source;
    +        this.str = str;
    +        this.length = length;
    +    }
    +
    +    @Evaluator(warnExceptions = IllegalArgumentException.class)
    +    static BytesRef process(@Fixed BytesRef out, BytesRef str, int length) {
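    +        // The @Fixed "out" parameter is a scratch BytesRef created once per evaluator and reused across rows, avoiding per-row allocation.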
    +        out.bytes = str.bytes;
    +        out.offset = str.offset;
    +        out.length = str.length;
    +        int curLenStart = 0;
    +        UnicodeUtil.UTF8CodePoint cp = new UnicodeUtil.UTF8CodePoint();
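    +        // Advance one UTF-8 code point at a time so multi-byte characters are never split mid-sequence.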
    +        for (int i = 0; i < length && curLenStart < out.length; i++, curLenStart += cp.numBytes) {
    +            UnicodeUtil.codePointAt(out.bytes, out.offset + curLenStart, cp);
    +        }
    +        out.length = Math.min(curLenStart, out.length);
    +        return out;
    +    }
    +
    +    @Override
    +    public Supplier<EvalOperator.ExpressionEvaluator> toEvaluator(
    +        Function<Expression, Supplier<EvalOperator.ExpressionEvaluator>> toEvaluator
    +    ) {
    +
    +        Supplier<EvalOperator.ExpressionEvaluator> strSupplier = toEvaluator.apply(str);
    +        Supplier<EvalOperator.ExpressionEvaluator> lengthSupplier = toEvaluator.apply(length);
    +        return () -> {
    +            BytesRef out = new BytesRef();
    +            return new LeftEvaluator(source, out, strSupplier.get(), lengthSupplier.get());
    +        };
    +    }
    +
    +    @Override
    +    public Expression replaceChildren(List<Expression> newChildren) {
    +        return new Left(source(), newChildren.get(0), newChildren.get(1));
    +    }
    +
    +    @Override
    +    protected NodeInfo<? extends Expression> info() {
    +        return NodeInfo.create(this, Left::new, str, length);
    +    }
    +
    +    @Override
    +    public DataType dataType() {
    +        return DataTypes.KEYWORD;
    +    }
    +
    +    @Override
    +    protected TypeResolution resolveType() {
    +        if (childrenResolved() == false) {
    +            return new TypeResolution("Unresolved children");
    +        }
    +
    +        TypeResolution resolution = isString(str, sourceText(), FIRST);
    +        if (resolution.unresolved()) {
    +            return resolution;
    +        }
    +
    +        resolution = isInteger(length, sourceText(), SECOND);
    +        if (resolution.unresolved()) {
    +            return resolution;
    +        }
    +
    +        return TypeResolution.TYPE_RESOLVED;
    +    }
    +
    +    @Override
    +    public boolean foldable() {
    +        return str.foldable() && length.foldable();
    +    }
    +
    +    @Override
    +    public Object fold() {
    +        return EvaluatorMapper.super.fold();
    +    }
    +
    +    @Override
    +    public ScriptTemplate asScript() {
    +        throw new UnsupportedOperationException();
    +    }
    +}
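    Reviewer aside: the prefix logic in process() counts code points rather than raw bytes, which is what keeps LEFT from splitting multi-byte characters (see testUnicode in LeftTests further down). Below is a minimal, self-contained sketch of the same walk; the class and method names are invented for illustration and it is not part of this patch, assuming only Lucene on the classpath.

    import org.apache.lucene.util.BytesRef;
    import org.apache.lucene.util.UnicodeUtil;

    public class LeftPrefixSketch {
        // Mirrors Left.process(): keep at most `length` leading code points of `s`.
        static String leftPrefix(String s, int length) {
            BytesRef str = new BytesRef(s);
            int end = 0;
            UnicodeUtil.UTF8CodePoint cp = new UnicodeUtil.UTF8CodePoint();
            // Each iteration decodes one code point and skips its full byte width.
            for (int i = 0; i < length && end < str.length; i++, end += cp.numBytes) {
                UnicodeUtil.codePointAt(str.bytes, str.offset + end, cp);
            }
            // A negative length never enters the loop and yields the empty string.
            return new BytesRef(str.bytes, str.offset, Math.min(end, str.length)).utf8ToString();
        }

        public static void main(String[] args) {
            System.out.println(leftPrefix("a\ud83c\udf09tiger", 2)); // "a" plus the intact surrogate pair
        }
    }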
    diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java
    index 599cc5d3b75a8..ff3b75e60245e 100644
    --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java
    +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java
    @@ -52,6 +52,7 @@
     import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan;
     import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan2;
     import org.elasticsearch.xpack.esql.expression.function.scalar.math.AutoBucket;
    +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Ceil;
     import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cos;
     import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cosh;
     import org.elasticsearch.xpack.esql.expression.function.scalar.math.E;
    @@ -81,6 +82,7 @@
     import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce;
     import org.elasticsearch.xpack.esql.expression.function.scalar.string.Concat;
     import org.elasticsearch.xpack.esql.expression.function.scalar.string.LTrim;
    +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Left;
     import org.elasticsearch.xpack.esql.expression.function.scalar.string.Length;
     import org.elasticsearch.xpack.esql.expression.function.scalar.string.RTrim;
     import org.elasticsearch.xpack.esql.expression.function.scalar.string.Split;
    @@ -294,6 +296,7 @@ public static List<PlanNameRegistry.Entry> namedTypeEntries() {
                 of(ESQL_UNARY_SCLR_CLS, Acos.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
                 of(ESQL_UNARY_SCLR_CLS, Asin.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
                 of(ESQL_UNARY_SCLR_CLS, Atan.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
    +            of(ESQL_UNARY_SCLR_CLS, Ceil.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
                 of(ESQL_UNARY_SCLR_CLS, Cos.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
                 of(ESQL_UNARY_SCLR_CLS, Cosh.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
                 of(ESQL_UNARY_SCLR_CLS, Floor.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
    @@ -341,6 +344,7 @@ public static List<PlanNameRegistry.Entry> namedTypeEntries() {
                 of(ScalarFunction.class, Pow.class, PlanNamedTypes::writePow, PlanNamedTypes::readPow),
                 of(ScalarFunction.class, StartsWith.class, PlanNamedTypes::writeStartsWith, PlanNamedTypes::readStartsWith),
                 of(ScalarFunction.class, Substring.class, PlanNamedTypes::writeSubstring, PlanNamedTypes::readSubstring),
    +            of(ScalarFunction.class, Left.class, PlanNamedTypes::writeLeft, PlanNamedTypes::readLeft),
                 of(ScalarFunction.class, Split.class, PlanNamedTypes::writeSplit, PlanNamedTypes::readSplit),
                 of(ScalarFunction.class, Tau.class, PlanNamedTypes::writeNoArgScalar, PlanNamedTypes::readNoArgScalar),
                 // ArithmeticOperations
    @@ -1033,6 +1037,7 @@ static void writeBinaryLogic(PlanStreamOutput out, BinaryLogic binaryLogic) thro
             entry(name(Acos.class), Acos::new),
             entry(name(Asin.class), Asin::new),
             entry(name(Atan.class), Atan::new),
    +        entry(name(Ceil.class), Ceil::new),
             entry(name(Cos.class), Cos::new),
             entry(name(Cosh.class), Cosh::new),
             entry(name(Floor.class), Floor::new),
    @@ -1270,6 +1275,17 @@ static void writeSubstring(PlanStreamOutput out, Substring substring) throws IOE
             out.writeOptionalWriteable(fields.size() == 3 ? o -> out.writeExpression(fields.get(2)) : null);
         }
     
    +    static Left readLeft(PlanStreamInput in) throws IOException {
    +        return new Left(Source.EMPTY, in.readExpression(), in.readExpression());
    +    }
    +
    +    static void writeLeft(PlanStreamOutput out, Left left) throws IOException {
    +        List<Expression> fields = left.children();
    +        assert fields.size() == 2;
    +        out.writeExpression(fields.get(0));
    +        out.writeExpression(fields.get(1));
    +    }
    +
         static Split readSplit(PlanStreamInput in) throws IOException {
             return new Split(Source.EMPTY, in.readExpression(), in.readExpression());
         }
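    The readLeft/writeLeft pair above follows the convention of the neighboring entries (for example readSplit/writeSplit): the writer streams the child expressions in order and the reader rebuilds the node with Source.EMPTY, so source positions are deliberately not round-tripped. A hedged sketch of that contract, assuming `out` and `in` are a PlanStreamOutput/PlanStreamInput pair backed by the same buffer (the registry wiring needed to construct them is elided):

    // Illustration only, not part of the patch; strExpr and lengthExpr are placeholder expressions.
    Left original = new Left(Source.EMPTY, strExpr, lengthExpr);
    PlanNamedTypes.writeLeft(out, original);   // writes str, then length
    Left copy = PlanNamedTypes.readLeft(in);   // reads them back in the same order
    assert copy.children().equals(original.children());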
    diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
    index 87bbdfde39d03..4dec26888dc71 100644
    --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
    +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
    @@ -60,7 +60,11 @@
     import java.util.concurrent.Executors;
     import java.util.concurrent.Future;
     import java.util.function.DoubleBinaryOperator;
    +import java.util.function.DoubleFunction;
     import java.util.function.DoubleUnaryOperator;
    +import java.util.function.Function;
    +import java.util.function.IntFunction;
    +import java.util.function.LongFunction;
     import java.util.function.Supplier;
     import java.util.stream.Collectors;
     import java.util.stream.IntStream;
    @@ -222,34 +226,32 @@ public String toString() {
             }
     
             /**
    -         * Generate positive test cases for binary functions that operate on an {@code numeric}
    +         * Generate positive test cases for unary functions that operate on {@code numeric}
              * fields by casting them to {@link DataTypes#DOUBLE}s.
              */
         public static List<TestCaseSupplier> forUnaryCastingToDouble(String name, String argName, DoubleUnaryOperator expected) {
    +            String read = "Attribute[channel=0]";
    +            String eval = name + "[" + argName + "=";
     List<TestCaseSupplier> suppliers = new ArrayList<>();
    -            for (DataType type : EsqlDataTypes.types()) {
    -                if (type.isNumeric() == false || EsqlDataTypes.isRepresentable(type) == false) {
    -                    continue;
    -                }
    -                for (Map.Entry<String, Supplier<Object>> supplier : RANDOM_VALUE_SUPPLIERS.get(type)) {
    -                    suppliers.add(new TestCaseSupplier(supplier.getKey(), List.of(type), () -> {
    -                        Number value = (Number) supplier.getValue().get();
    -                        TypedData typed = new TypedData(
    -                            // TODO there has to be a better way to handle unsigned long
    -                            value instanceof BigInteger b ? NumericUtils.asLongUnsigned(b) : value,
    -                            type,
    -                            "value"
    -                        );
    -                        String evalName = castToDoubleEvaluator("Attribute[channel=0]", type);
    -                        return new TestCase(
    -                            List.of(typed),
    -                            name + "[" + argName + "=" + evalName + "]",
    -                            DataTypes.DOUBLE,
    -                            equalTo(expected.applyAsDouble(value.doubleValue()))
    -                        );
    -                    }));
    -                }
    -            }
    +            forUnaryInt(
    +                suppliers,
    +                eval + castToDoubleEvaluator(read, DataTypes.INTEGER) + "]",
    +                DataTypes.DOUBLE,
    +                i -> expected.applyAsDouble(i)
    +            );
    +            forUnaryLong(
    +                suppliers,
    +                eval + castToDoubleEvaluator(read, DataTypes.LONG) + "]",
    +                DataTypes.DOUBLE,
    +                l -> expected.applyAsDouble(l)
    +            );
    +            forUnaryUnsignedLong(
    +                suppliers,
    +                eval + castToDoubleEvaluator(read, DataTypes.UNSIGNED_LONG) + "]",
    +                DataTypes.DOUBLE,
    +                ul -> expected.applyAsDouble(ul.doubleValue())
    +            );
    +            forUnaryDouble(suppliers, eval + read + "]", DataTypes.DOUBLE, i -> expected.applyAsDouble(i));
                 return suppliers;
             }
     
    @@ -305,6 +307,81 @@ public static List<TestCaseSupplier> forBinaryCastingToDouble(
                 return suppliers;
             }
     
    +        /**
    +         * Generate positive test cases for a unary function operating on an {@link DataTypes#INTEGER}.
    +         */
    +        public static void forUnaryInt(
    +            List<TestCaseSupplier> suppliers,
    +            String expectedEvaluatorToString,
    +            DataType expectedType,
    +            IntFunction<Object> expectedValue
    +        ) {
    +            unaryNumeric(suppliers, expectedEvaluatorToString, DataTypes.INTEGER, expectedType, n -> expectedValue.apply(n.intValue()));
    +        }
    +
    +        /**
    +         * Generate positive test cases for a unary function operating on an {@link DataTypes#LONG}.
    +         */
    +        public static void forUnaryLong(
    +            List<TestCaseSupplier> suppliers,
    +            String expectedEvaluatorToString,
    +            DataType expectedType,
    +            LongFunction<Object> expectedValue
    +        ) {
    +            unaryNumeric(suppliers, expectedEvaluatorToString, DataTypes.LONG, expectedType, n -> expectedValue.apply(n.longValue()));
    +        }
    +
    +        /**
    +         * Generate positive test cases for a unary function operating on an {@link DataTypes#UNSIGNED_LONG}.
    +         */
    +        public static void forUnaryUnsignedLong(
    +            List<TestCaseSupplier> suppliers,
    +            String expectedEvaluatorToString,
    +            DataType expectedType,
    +            Function<BigInteger, Object> expectedValue
    +        ) {
    +            unaryNumeric(
    +                suppliers,
    +                expectedEvaluatorToString,
    +                DataTypes.UNSIGNED_LONG,
    +                expectedType,
    +                n -> expectedValue.apply((BigInteger) n)
    +            );
    +        }
    +
    +        /**
    +         * Generate positive test cases for a unary function operating on an {@link DataTypes#DOUBLE}.
    +         */
    +        public static void forUnaryDouble(
    +            List<TestCaseSupplier> suppliers,
    +            String expectedEvaluatorToString,
    +            DataType expectedType,
    +            DoubleFunction<Object> expectedValue
    +        ) {
    +            unaryNumeric(suppliers, expectedEvaluatorToString, DataTypes.DOUBLE, expectedType, n -> expectedValue.apply(n.doubleValue()));
    +        }
    +
    +        private static void unaryNumeric(
    +            List<TestCaseSupplier> suppliers,
    +            String expectedEvaluatorToString,
    +            DataType inputType,
    +            DataType expectedOutputType,
    +            Function<Number, Object> expected
    +        ) {
    +            for (Map.Entry<String, Supplier<Object>> supplier : RANDOM_VALUE_SUPPLIERS.get(inputType)) {
    +                suppliers.add(new TestCaseSupplier(supplier.getKey(), List.of(inputType), () -> {
    +                    Number value = (Number) supplier.getValue().get();
    +                    TypedData typed = new TypedData(
    +                        // TODO there has to be a better way to handle unsigned long
    +                        value instanceof BigInteger b ? NumericUtils.asLongUnsigned(b) : value,
    +                        inputType,
    +                        "value"
    +                    );
    +                    return new TestCase(List.of(typed), expectedEvaluatorToString, expectedOutputType, equalTo(expected.apply(value)));
    +                }));
    +            }
    +        }
    +
        private static final Map<DataType, List<Map.Entry<String, Supplier<Object>>>> RANDOM_VALUE_SUPPLIERS = Map.ofEntries(
                 Map.entry(
                     DataTypes.DOUBLE,
    @@ -768,19 +845,6 @@ private static Stream<DataType> representable() {
             return EsqlDataTypes.types().stream().filter(EsqlDataTypes::isRepresentable);
         }
     
    -    @AfterClass
    -    public static void renderSignature() throws IOException {
    -        FunctionDefinition definition = definition();
    -        if (definition == null) {
    -            LogManager.getLogger(getTestClass()).info("Skipping rendering signature because the function isn't registered");
    -            return;
    -        }
    -
    -        String rendered = RailRoadDiagram.functionSignature(definition);
    -        LogManager.getLogger(getTestClass()).info("Writing function signature");
    -        writeToTempDir("signature", rendered, "svg");
    -    }
    -
         /**
          * Unique signatures encountered by this test.
          * 

    @@ -822,14 +886,18 @@ public static void renderTypesTable() throws IOException { return; } + List definedSignature = ShowFunctions.signature(definition); StringBuilder header = new StringBuilder(); - for (String arg : ShowFunctions.signature(definition)) { + for (String arg : definedSignature) { header.append(arg).append(" | "); } header.append("result"); List table = new ArrayList<>(); for (Map.Entry, DataType> sig : signatures.entrySet()) { + if (sig.getKey().size() != definedSignature.size()) { + continue; + } StringBuilder b = new StringBuilder(); for (DataType arg : sig.getKey()) { b.append(arg.typeName()).append(" | "); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilTests.java new file mode 100644 index 0000000000000..69c2a2817c6bc --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilTests.java @@ -0,0 +1,75 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.util.List; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; + +public class CeilTests extends AbstractScalarFunctionTestCase { + public CeilTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("large double value", () -> { + double arg = 1 / randomDouble(); + return new TestCase( + List.of(new TypedData(arg, DataTypes.DOUBLE, "arg")), + "CeilDoubleEvaluator[val=Attribute[channel=0]]", + DataTypes.DOUBLE, + equalTo(Math.ceil(arg)) + ); + }), new TestCaseSupplier("integer value", () -> { + int arg = randomInt(); + return new TestCase( + List.of(new TypedData(arg, DataTypes.INTEGER, "arg")), + "Attribute[channel=0]", + DataTypes.INTEGER, + equalTo(arg) + ); + }), new TestCaseSupplier("long value", () -> { + long arg = randomLong(); + return new TestCase(List.of(new TypedData(arg, DataTypes.LONG, "arg")), "Attribute[channel=0]", DataTypes.LONG, equalTo(arg)); + }), new TestCaseSupplier("unsigned long value", () -> { + long arg = randomLong(); + return new TestCase( + List.of(new TypedData(arg, DataTypes.UNSIGNED_LONG, "arg")), + "Attribute[channel=0]", + DataTypes.UNSIGNED_LONG, + equalTo(arg) + ); + }))); + } + + @Override + protected DataType expectedType(List argTypes) { + return argTypes.get(0); + } + + @Override + protected List argSpec() { + return List.of(required(numerics())); + } + + @Override + protected Expression build(Source source, List args) { + return new Ceil(source, args.get(0)); + } +} diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorTests.java index 845d74cbeed84..530fcc177a0ac 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorTests.java @@ -10,43 +10,30 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; -import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; +import org.elasticsearch.xpack.ql.util.NumericUtils; +import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; -import static org.hamcrest.Matchers.equalTo; - -public class FloorTests extends AbstractScalarFunctionTestCase { +public class FloorTests extends AbstractFunctionTestCase { public FloorTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @ParametersFactory public static Iterable parameters() { - return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("large double value", () -> { - double arg = 1 / randomDouble(); - return new TestCase( - List.of(new TypedData(arg, DataTypes.DOUBLE, "arg")), - "FloorDoubleEvaluator[val=Attribute[channel=0]]", - DataTypes.DOUBLE, - equalTo(Math.floor(arg)) - ); - }))); - } - - @Override - protected DataType expectedType(List argTypes) { - return argTypes.get(0); - } - - @Override - protected List argSpec() { - return List.of(required(numerics())); + String read = "Attribute[channel=0]"; + List suppliers = new ArrayList<>(); + TestCaseSupplier.forUnaryInt(suppliers, read, DataTypes.INTEGER, i -> i); + TestCaseSupplier.forUnaryLong(suppliers, read, DataTypes.LONG, l -> l); + TestCaseSupplier.forUnaryUnsignedLong(suppliers, read, DataTypes.UNSIGNED_LONG, ul -> NumericUtils.asLongUnsigned(ul)); + TestCaseSupplier.forUnaryDouble(suppliers, "FloorDoubleEvaluator[val=" + read + "]", DataTypes.DOUBLE, Math::floor); + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(false, suppliers))); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java index a09d7c944cb5e..5222fc605a6bd 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java @@ -10,56 +10,38 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; 
-import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; +import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; -import static org.hamcrest.Matchers.equalTo; - -public class Log10Tests extends AbstractScalarFunctionTestCase { +public class Log10Tests extends AbstractFunctionTestCase { public Log10Tests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @ParametersFactory public static Iterable parameters() { - return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("Log10 of Double", () -> { - // TODO: include larger values here - double arg = randomDouble(); - return new TestCase( - List.of(new TypedData(arg, DataTypes.DOUBLE, "arg")), - "Log10DoubleEvaluator[val=Attribute[channel=0]]", - DataTypes.DOUBLE, - equalTo(Math.log10(arg)) - ); - }), new TestCaseSupplier("Log10(negative)", () -> { - double arg = randomIntBetween(Integer.MIN_VALUE, -1); // it's inclusive - return new TestCase( - List.of(new TypedData(arg, DataTypes.DOUBLE, "arg")), - "Log10DoubleEvaluator[val=Attribute[channel=0]]", - DataTypes.DOUBLE, - equalTo(Double.NaN) - ); - }))); + String read = "Attribute[channel=0]"; + List suppliers = new ArrayList<>(); + TestCaseSupplier.forUnaryInt(suppliers, "Log10IntEvaluator[val=" + read + "]", DataTypes.DOUBLE, Math::log10); + TestCaseSupplier.forUnaryLong(suppliers, "Log10LongEvaluator[val=" + read + "]", DataTypes.DOUBLE, Math::log10); + TestCaseSupplier.forUnaryUnsignedLong( + suppliers, + "Log10UnsignedLongEvaluator[val=" + read + "]", + DataTypes.DOUBLE, + ul -> Math.log10(ul.doubleValue()) + ); + TestCaseSupplier.forUnaryDouble(suppliers, "Log10DoubleEvaluator[val=" + read + "]", DataTypes.DOUBLE, Math::log10); + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); } @Override protected Expression build(Source source, List args) { return new Log10(source, args.get(0)); } - - @Override - protected List argSpec() { - return List.of(required(numerics())); - } - - @Override - protected DataType expectedType(List argTypes) { - return DataTypes.DOUBLE; - } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java index c6549443ad880..e328f38f1b64c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java @@ -10,53 +10,38 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; -import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; -import org.hamcrest.Matcher; +import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; -import static org.hamcrest.Matchers.equalTo; - -public class SqrtTests extends AbstractScalarFunctionTestCase { +public class SqrtTests extends AbstractFunctionTestCase { public SqrtTests(@Name("TestCase") Supplier 
testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @ParametersFactory public static Iterable parameters() { - return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("Sqrt of Double", () -> { - // TODO: include larger values here - double arg = randomDouble(); - return new TestCase( - List.of(new TypedData(arg, DataTypes.DOUBLE, "arg")), - "SqrtDoubleEvaluator[val=Attribute[channel=0]]", - DataTypes.DOUBLE, - equalTo(Math.sqrt(arg)) - ); - }))); - } - - private Matcher resultsMatcher(List typedData) { - return equalTo(Math.sqrt((Double) typedData.get(0).data())); + String read = "Attribute[channel=0]"; + List suppliers = new ArrayList<>(); + TestCaseSupplier.forUnaryInt(suppliers, "SqrtIntEvaluator[val=" + read + "]", DataTypes.DOUBLE, Math::sqrt); + TestCaseSupplier.forUnaryLong(suppliers, "SqrtLongEvaluator[val=" + read + "]", DataTypes.DOUBLE, Math::sqrt); + TestCaseSupplier.forUnaryUnsignedLong( + suppliers, + "SqrtUnsignedLongEvaluator[val=" + read + "]", + DataTypes.DOUBLE, + ul -> Math.sqrt(ul.doubleValue()) + ); + TestCaseSupplier.forUnaryDouble(suppliers, "SqrtDoubleEvaluator[val=" + read + "]", DataTypes.DOUBLE, Math::sqrt); + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); } @Override protected Expression build(Source source, List args) { return new Sqrt(source, args.get(0)); } - - @Override - protected List argSpec() { - return List.of(required(numerics())); - } - - @Override - protected DataType expectedType(List argTypes) { - return DataTypes.DOUBLE; - } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftTests.java new file mode 100644 index 0000000000000..1119fcc80aa39 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftTests.java @@ -0,0 +1,110 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.Literal; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; +import org.hamcrest.Matcher; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; + +import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; +import static org.hamcrest.Matchers.equalTo; + +public class LeftTests extends AbstractScalarFunctionTestCase { + public LeftTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + List suppliers = new ArrayList<>(); + suppliers.add(new TestCaseSupplier("long", () -> { + int length = between(1, 10); + String text = randomAlphaOfLength(10); + return new TestCase( + List.of(new TypedData(new BytesRef(text), DataTypes.KEYWORD, "str"), new TypedData(length, DataTypes.INTEGER, "length")), + "LeftEvaluator[out=[], str=Attribute[channel=0], length=Attribute[channel=1]]", + DataTypes.KEYWORD, + equalTo(new BytesRef(text.substring(0, length))) + ); + })); + suppliers.add(new TestCaseSupplier("short", () -> { + int length = between(2, 10); + String text = randomAlphaOfLength(1); + return new TestCase( + List.of(new TypedData(new BytesRef(text), DataTypes.KEYWORD, "str"), new TypedData(length, DataTypes.INTEGER, "length")), + "LeftEvaluator[out=[], str=Attribute[channel=0], length=Attribute[channel=1]]", + DataTypes.KEYWORD, + equalTo(new BytesRef(text)) + ); + })); + return parameterSuppliersFromTypedData(suppliers); + } + + @Override + protected Expression build(Source source, List args) { + return new Left(source, args.get(0), args.get(1)); + } + + @Override + protected List argSpec() { + return List.of(required(strings()), required(integers())); + } + + @Override + protected DataType expectedType(List argTypes) { + return DataTypes.KEYWORD; + } + + public Matcher resultsMatcher(List typedData) { + String str = ((BytesRef) typedData.get(0).data()).utf8ToString(); + int length = (Integer) typedData.get(1).data(); + return equalTo(new BytesRef(str.substring(0, length))); + } + + public void testReasonableLength() { + assertThat(process("a fox call", 5), equalTo("a fox")); + } + + public void testMassiveLength() { + assertThat(process("a fox call", 10), equalTo("a fox call")); + } + + public void testNegativeLength() { + assertThat(process("a fox call", -1), equalTo("")); + } + + public void testUnicode() { + final String s = "a\ud83c\udf09tiger"; + assert s.codePointCount(0, s.length()) == 7; + assertThat(process(s, 2), equalTo("a\ud83c\udf09")); + } + + private String process(String str, int length) { + Block result = evaluator( + new Left(Source.EMPTY, field("str", DataTypes.KEYWORD), new Literal(Source.EMPTY, length, DataTypes.INTEGER)) + ).get().eval(row(List.of(new BytesRef(str)))); + if (null == result) { + return null; + } + BytesRef resultByteRef = ((BytesRef) toJavaObject(result, 0)); + return resultByteRef == null ? 
null : resultByteRef.utf8ToString(); + } +} diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/AbstractProfilingPersistenceManager.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/AbstractProfilingPersistenceManager.java index 520ace3e1970f..06946183cc97c 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/AbstractProfilingPersistenceManager.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/AbstractProfilingPersistenceManager.java @@ -21,22 +21,14 @@ import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; -import org.elasticsearch.cluster.health.ClusterHealthStatus; -import org.elasticsearch.cluster.health.ClusterIndexHealth; -import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.cluster.metadata.MappingMetadata; -import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.gateway.GatewayService; -import org.elasticsearch.index.Index; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ClientHelper; import java.io.Closeable; -import java.util.List; -import java.util.Map; import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.BiConsumer; @@ -44,22 +36,26 @@ import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; -public abstract class AbstractProfilingPersistenceManager - implements - ClusterStateListener, - Closeable { +abstract class AbstractProfilingPersistenceManager implements ClusterStateListener, Closeable { protected final Logger logger = LogManager.getLogger(getClass()); private final AtomicBoolean inProgress = new AtomicBoolean(false); private final ClusterService clusterService; protected final ThreadPool threadPool; protected final Client client; + private final IndexStateResolver indexStateResolver; private volatile boolean templatesEnabled; - public AbstractProfilingPersistenceManager(ThreadPool threadPool, Client client, ClusterService clusterService) { + AbstractProfilingPersistenceManager( + ThreadPool threadPool, + Client client, + ClusterService clusterService, + IndexStateResolver indexStateResolver + ) { this.threadPool = threadPool; this.client = client; this.clusterService = clusterService; + this.indexStateResolver = indexStateResolver; } public void initialize() { @@ -95,7 +91,7 @@ public final void clusterChanged(ClusterChangedEvent event) { return; } - if (isAllResourcesCreated(event, clusterService.getSettings()) == false) { + if (areAllIndexTemplatesCreated(event, clusterService.getSettings()) == false) { logger.trace("Skipping index creation; not all required resources are present yet"); return; } @@ -109,27 +105,21 @@ public final void clusterChanged(ClusterChangedEvent event) { try (var refs = new RefCountingRunnable(() -> inProgress.set(false))) { ClusterState clusterState = event.state(); for (T index : getManagedIndices()) { - IndexState state = getIndexState(clusterState, index); + IndexState state = indexStateResolver.getIndexState(clusterState, index); if (state.getStatus().actionable) { onIndexState(clusterState, state, ActionListener.releasing(refs.acquire())); 
+            } else if (state.getStatus() == IndexStatus.TOO_OLD) {
+                logger.info("Aborting index creation as index [{}] is considered too old.", index);
+                return;
             }
         }
     }
 
-    protected boolean isAllResourcesCreated(ClusterChangedEvent event, Settings settings) {
+    protected boolean areAllIndexTemplatesCreated(ClusterChangedEvent event, Settings settings) {
         return ProfilingIndexTemplateRegistry.isAllResourcesCreated(event.state(), settings);
     }
 
-    /**
-     * Extracts the appropriate index metadata for a given index from the cluster state.
-     *
-     * @param state Current cluster state. Never null.
-     * @param index An index for which to retrieve index metadata. Never null.
-     * @return The corresponding index metadata or null if there are none.
-     */
-    protected abstract IndexMetadata indexMetadata(ClusterState state, T index);
-
     /**
      * @return An iterable of all indices that are managed by this instance.
      */
@@ -148,78 +138,6 @@ protected abstract void onIndexState(
         ActionListener<Void> listener
     );
 
-    private IndexState<T> getIndexState(ClusterState state, T index) {
-        IndexMetadata metadata = indexMetadata(state, index);
-        if (metadata == null) {
-            return new IndexState<>(index, null, Status.NEEDS_CREATION);
-        }
-        if (metadata.getState() == IndexMetadata.State.CLOSE) {
-            logger.warn(
-                "Index [{}] is closed. This is likely to prevent Universal Profiling from functioning correctly",
-                metadata.getIndex()
-            );
-            return new IndexState<>(index, metadata.getIndex(), Status.CLOSED);
-        }
-        final IndexRoutingTable routingTable = state.getRoutingTable().index(metadata.getIndex());
-        ClusterHealthStatus indexHealth = new ClusterIndexHealth(metadata, routingTable).getStatus();
-        if (indexHealth == ClusterHealthStatus.RED) {
-            logger.trace("Index [{}] health status is RED, any pending mapping upgrades will wait until this changes", metadata.getIndex());
-            return new IndexState<>(index, metadata.getIndex(), Status.UNHEALTHY);
-        }
-        MappingMetadata mapping = metadata.mapping();
-        if (mapping != null) {
-            @SuppressWarnings("unchecked")
-            Map<String, Object> meta = (Map<String, Object>) mapping.sourceAsMap().get("_meta");
-            int currentIndexVersion;
-            int currentTemplateVersion;
-            if (meta == null) {
-                logger.debug("Missing _meta field in mapping of index [{}], assuming initial version.", metadata.getIndex());
-                currentIndexVersion = 1;
-                currentTemplateVersion = 1;
-            } else {
-                // we are extra defensive and treat any unexpected values as an unhealthy index which we won't touch.
-                currentIndexVersion = getVersionField(metadata.getIndex(), meta, "index-version");
-                currentTemplateVersion = getVersionField(metadata.getIndex(), meta, "index-template-version");
-                if (currentIndexVersion == -1 || currentTemplateVersion == -1) {
-                    return new IndexState<>(index, metadata.getIndex(), Status.UNHEALTHY);
-                }
-            }
-            if (index.getVersion() > currentIndexVersion) {
-                return new IndexState<>(index, metadata.getIndex(), Status.NEEDS_VERSION_BUMP);
-            } else if (getIndexTemplateVersion() > currentTemplateVersion) {
-                // if there are no migrations we can consider the index up-to-date even if the index template version does not match.
-                List<Migration> pendingMigrations = index.getMigrations(currentTemplateVersion);
-                if (pendingMigrations.isEmpty()) {
-                    logger.trace(
-                        "Index [{}] with index template version [{}] (current is [{}]) is up-to-date (no pending migrations).",
-                        metadata.getIndex(),
-                        currentTemplateVersion,
-                        getIndexTemplateVersion()
-                    );
-                    return new IndexState<>(index, metadata.getIndex(), Status.UP_TO_DATE);
-                }
-                logger.trace(
-                    "Index [{}] with index template version [{}] (current is [{}]) has [{}] pending migrations.",
-                    metadata.getIndex(),
-                    currentTemplateVersion,
-                    getIndexTemplateVersion(),
-                    pendingMigrations.size()
-                );
-                return new IndexState<>(index, metadata.getIndex(), Status.NEEDS_MAPPINGS_UPDATE, pendingMigrations);
-            } else {
-                return new IndexState<>(index, metadata.getIndex(), Status.UP_TO_DATE);
-            }
-        } else {
-            logger.warn("No mapping found for existing index [{}]. Index cannot be migrated.", metadata.getIndex());
-            return new IndexState<>(index, metadata.getIndex(), Status.UNHEALTHY);
-        }
-    }
-
-    // overridable for testing
-    protected int getIndexTemplateVersion() {
-        return ProfilingIndexTemplateRegistry.INDEX_TEMPLATE_VERSION;
-    }
-
     protected final void applyMigrations(IndexState<T> indexState, ActionListener<Void> listener) {
         String writeIndex = indexState.getWriteIndex().getName();
         try (var refs = new RefCountingRunnable(() -> listener.onResponse(null))) {
@@ -282,80 +200,4 @@ public void onFailure(Exception ex) {
             }, consumer);
         });
     }
-
-    private int getVersionField(Index index, Map<String, Object> meta, String fieldName) {
-        Object value = meta.get(fieldName);
-        if (value instanceof Integer) {
-            return (int) value;
-        }
-        if (value == null) {
-            logger.warn("Metadata version field [{}] of index [{}] is empty.", fieldName, index);
-            return -1;
-        }
-        logger.warn("Metadata version field [{}] of index [{}] is [{}] (expected an integer).", fieldName, index, value);
-        return -1;
-    }
-
-    protected static final class IndexState<T extends ProfilingIndexAbstraction> {
-        private final T index;
-        private final Index writeIndex;
-        private final Status status;
-        private final List<Migration> pendingMigrations;
-
-        IndexState(T index, Index writeIndex, Status status) {
-            this(index, writeIndex, status, null);
-        }
-
-        IndexState(T index, Index writeIndex, Status status, List<Migration> pendingMigrations) {
-            this.index = index;
-            this.writeIndex = writeIndex;
-            this.status = status;
-            this.pendingMigrations = pendingMigrations;
-        }
-
-        public T getIndex() {
-            return index;
-        }
-
-        public Index getWriteIndex() {
-            return writeIndex;
-        }
-
-        public Status getStatus() {
-            return status;
-        }
-
-        public List<Migration> getPendingMigrations() {
-            return pendingMigrations;
-        }
-    }
-
-    enum Status {
-        CLOSED(false),
-        UNHEALTHY(false),
-        NEEDS_CREATION(true),
-        NEEDS_VERSION_BUMP(true),
-        UP_TO_DATE(false),
-        NEEDS_MAPPINGS_UPDATE(true);
-
-        /**
-         * Whether a status is for informational purposes only or whether it should be acted upon and may change cluster state.
-         */
-        private final boolean actionable;
-
-        Status(boolean actionable) {
-            this.actionable = actionable;
-        }
-    }
-
-    /**
-     * An index that is used by Universal Profiling.
-     */
-    interface ProfilingIndexAbstraction {
-        String getName();
-
-        int getVersion();
-
-        List<Migration> getMigrations(int currentIndexTemplateVersion);
-    }
 }
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStatusAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStatusAction.java
index d95758b44c04a..31540cffef010 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStatusAction.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStatusAction.java
@@ -33,18 +33,21 @@ public static class Response extends ActionResponse implements ToXContentObject
         private boolean profilingEnabled;
         private boolean resourceManagementEnabled;
         private boolean resourcesCreated;
+        private boolean pre891Data;
 
         public Response(StreamInput in) throws IOException {
             super(in);
             profilingEnabled = in.readBoolean();
             resourceManagementEnabled = in.readBoolean();
             resourcesCreated = in.readBoolean();
+            pre891Data = in.readBoolean();
         }
 
-        public Response(boolean profilingEnabled, boolean resourceManagementEnabled, boolean resourcesCreated) {
+        public Response(boolean profilingEnabled, boolean resourceManagementEnabled, boolean resourcesCreated, boolean pre891Data) {
             this.profilingEnabled = profilingEnabled;
             this.resourceManagementEnabled = resourceManagementEnabled;
             this.resourcesCreated = resourcesCreated;
+            this.pre891Data = pre891Data;
         }
 
         @Override
@@ -52,7 +55,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
             builder.startObject();
             builder.startObject("profiling").field("enabled", profilingEnabled).endObject();
             builder.startObject("resource_management").field("enabled", resourceManagementEnabled).endObject();
-            builder.startObject("resources").field("created", resourcesCreated).endObject();
+            builder.startObject("resources").field("created", resourcesCreated).field("pre_8_9_1_data", pre891Data).endObject();
             builder.endObject();
             return builder;
         }
@@ -62,6 +65,7 @@ public void writeTo(StreamOutput out) throws IOException {
             out.writeBoolean(profilingEnabled);
             out.writeBoolean(resourceManagementEnabled);
             out.writeBoolean(resourcesCreated);
+            out.writeBoolean(pre891Data);
         }
 
         @Override
@@ -71,12 +75,13 @@ public boolean equals(Object o) {
             Response response = (Response) o;
             return profilingEnabled == response.profilingEnabled
                 && resourceManagementEnabled == response.resourceManagementEnabled
-                && resourcesCreated == response.resourcesCreated;
+                && resourcesCreated == response.resourcesCreated
+                && pre891Data == response.pre891Data;
         }
 
         @Override
         public int hashCode() {
-            return Objects.hash(profilingEnabled, resourceManagementEnabled, resourcesCreated);
+            return Objects.hash(profilingEnabled, resourceManagementEnabled, resourcesCreated, pre891Data);
         }
 
         @Override
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexState.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexState.java
new file mode 100644
index 0000000000000..c34858acf5986
--- /dev/null
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexState.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.profiling;
+
+import org.elasticsearch.index.Index;
+
+import java.util.List;
+
+final class IndexState<T extends ProfilingIndexAbstraction> {
+    private final T index;
+    private final Index writeIndex;
+    private final IndexStatus status;
+    private final List<Migration> pendingMigrations;
+
+    IndexState(T index, Index writeIndex, IndexStatus status) {
+        this(index, writeIndex, status, null);
+    }
+
+    IndexState(T index, Index writeIndex, IndexStatus status, List<Migration> pendingMigrations) {
+        this.index = index;
+        this.writeIndex = writeIndex;
+        this.status = status;
+        this.pendingMigrations = pendingMigrations;
+    }
+
+    public T getIndex() {
+        return index;
+    }
+
+    public Index getWriteIndex() {
+        return writeIndex;
+    }
+
+    public IndexStatus getStatus() {
+        return status;
+    }
+
+    public List<Migration> getPendingMigrations() {
+        return pendingMigrations;
+    }
+}
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexStateResolver.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexStateResolver.java
new file mode 100644
index 0000000000000..748424386457f
--- /dev/null
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexStateResolver.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.profiling;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.health.ClusterHealthStatus;
+import org.elasticsearch.cluster.health.ClusterIndexHealth;
+import org.elasticsearch.cluster.metadata.IndexMetadata;
+import org.elasticsearch.cluster.metadata.MappingMetadata;
+import org.elasticsearch.cluster.routing.IndexRoutingTable;
+import org.elasticsearch.index.Index;
+import org.elasticsearch.index.IndexVersion;
+
+import java.util.List;
+import java.util.Map;
+
+class IndexStateResolver {
+    private static final Logger logger = LogManager.getLogger(IndexStateResolver.class);
+
+    private volatile boolean checkOutdatedIndices;
+
+    IndexStateResolver(boolean checkOutdatedIndices) {
+        this.checkOutdatedIndices = checkOutdatedIndices;
+    }
+
+    public void setCheckOutdatedIndices(boolean checkOutdatedIndices) {
+        this.checkOutdatedIndices = checkOutdatedIndices;
+    }
+
+    public <T extends ProfilingIndexAbstraction> IndexState<T> getIndexState(ClusterState state, T index) {
+        IndexMetadata metadata = index.indexMetadata(state);
+        if (metadata == null) {
+            return new IndexState<>(index, null, IndexStatus.NEEDS_CREATION);
+        }
+        if (metadata.getState() == IndexMetadata.State.CLOSE) {
+            logger.warn(
+                "Index [{}] is closed. This is likely to prevent Universal Profiling from functioning correctly",
+                metadata.getIndex()
+            );
+            return new IndexState<>(index, metadata.getIndex(), IndexStatus.CLOSED);
+        }
+        final IndexRoutingTable routingTable = state.getRoutingTable().index(metadata.getIndex());
+        ClusterHealthStatus indexHealth = new ClusterIndexHealth(metadata, routingTable).getStatus();
+        if (indexHealth == ClusterHealthStatus.RED) {
+            logger.trace("Index [{}] health status is RED, any pending mapping upgrades will wait until this changes", metadata.getIndex());
+            return new IndexState<>(index, metadata.getIndex(), IndexStatus.UNHEALTHY);
+        }
+        if (checkOutdatedIndices && metadata.getCreationVersion().before(IndexVersion.V_8_9_1)) {
+            logger.trace(
+                "Index [{}] has been created before version 8.9.1 and must be deleted before proceeding with the upgrade.",
+                metadata.getIndex()
+            );
+            return new IndexState<>(index, metadata.getIndex(), IndexStatus.TOO_OLD);
+        }
+        MappingMetadata mapping = metadata.mapping();
+        if (mapping != null) {
+            @SuppressWarnings("unchecked")
+            Map<String, Object> meta = (Map<String, Object>) mapping.sourceAsMap().get("_meta");
+            int currentIndexVersion;
+            int currentTemplateVersion;
+            if (meta == null) {
+                logger.debug("Missing _meta field in mapping of index [{}], assuming initial version.", metadata.getIndex());
+                currentIndexVersion = 1;
+                currentTemplateVersion = 1;
+            } else {
+                // we are extra defensive and treat any unexpected values as an unhealthy index which we won't touch.
+                currentIndexVersion = getVersionField(metadata.getIndex(), meta, "index-version");
+                currentTemplateVersion = getVersionField(metadata.getIndex(), meta, "index-template-version");
+                if (currentIndexVersion == -1 || currentTemplateVersion == -1) {
+                    return new IndexState<>(index, metadata.getIndex(), IndexStatus.UNHEALTHY);
+                }
+            }
+            if (index.getVersion() > currentIndexVersion) {
+                return new IndexState<>(index, metadata.getIndex(), IndexStatus.NEEDS_VERSION_BUMP);
+            } else if (getIndexTemplateVersion() > currentTemplateVersion) {
+                // if there are no migrations we can consider the index up-to-date even if the index template version does not match.
+                List<Migration> pendingMigrations = index.getMigrations(currentTemplateVersion);
+                if (pendingMigrations.isEmpty()) {
+                    logger.trace(
+                        "Index [{}] with index template version [{}] (current is [{}]) is up-to-date (no pending migrations).",
+                        metadata.getIndex(),
+                        currentTemplateVersion,
+                        getIndexTemplateVersion()
+                    );
+                    return new IndexState<>(index, metadata.getIndex(), IndexStatus.UP_TO_DATE);
+                }
+                logger.trace(
+                    "Index [{}] with index template version [{}] (current is [{}]) has [{}] pending migrations.",
+                    metadata.getIndex(),
+                    currentTemplateVersion,
+                    getIndexTemplateVersion(),
+                    pendingMigrations.size()
+                );
+                return new IndexState<>(index, metadata.getIndex(), IndexStatus.NEEDS_MAPPINGS_UPDATE, pendingMigrations);
+            } else {
+                return new IndexState<>(index, metadata.getIndex(), IndexStatus.UP_TO_DATE);
+            }
+        } else {
+            logger.warn("No mapping found for existing index [{}]. Index cannot be migrated.", metadata.getIndex());
+            return new IndexState<>(index, metadata.getIndex(), IndexStatus.UNHEALTHY);
+        }
+    }
+
+    private int getVersionField(Index index, Map<String, Object> meta, String fieldName) {
+        Object value = meta.get(fieldName);
+        if (value instanceof Integer) {
+            return (int) value;
+        }
+        if (value == null) {
+            logger.warn("Metadata version field [{}] of index [{}] is empty.", fieldName, index);
+            return -1;
+        }
+        logger.warn("Metadata version field [{}] of index [{}] is [{}] (expected an integer).", fieldName, index, value);
+        return -1;
+    }
+
+    // overridable for testing
+    protected int getIndexTemplateVersion() {
+        return ProfilingIndexTemplateRegistry.INDEX_TEMPLATE_VERSION;
+    }
+
+}
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexStatus.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexStatus.java
new file mode 100644
index 0000000000000..389c0de80cc5f
--- /dev/null
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexStatus.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.profiling;
+
+enum IndexStatus {
+    CLOSED(false),
+    UNHEALTHY(false),
+    TOO_OLD(false),
+    NEEDS_CREATION(true),
+    NEEDS_VERSION_BUMP(true),
+    UP_TO_DATE(false),
+    NEEDS_MAPPINGS_UPDATE(true);
+
+    /**
+     * Whether a status is for informational purposes only or whether it should be acted upon and may change cluster state.
+     */
+    public final boolean actionable;
+
+    IndexStatus(boolean actionable) {
+        this.actionable = actionable;
+    }
+}
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManager.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManager.java
index 37959df0638ba..722a7d1dbac63 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManager.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManager.java
@@ -36,7 +36,7 @@
 /**
  * Creates all data streams that are required for using Elastic Universal Profiling.
  */
-public class ProfilingDataStreamManager extends AbstractProfilingPersistenceManager<ProfilingDataStreamManager.ProfilingDataStream> {
+class ProfilingDataStreamManager extends AbstractProfilingPersistenceManager<ProfilingDataStreamManager.ProfilingDataStream> {
 
     public static final List<ProfilingDataStream> PROFILING_DATASTREAMS;
 
     static {
@@ -51,8 +51,8 @@ public class ProfilingDataStreamManager extends AbstractProfilingPersistenceManager<ProfilingDataStreamManager.ProfilingDataStream> {
         PROFILING_DATASTREAMS = Collections.unmodifiableList(dataStreams);
     }
 
-    public ProfilingDataStreamManager(ThreadPool threadPool, Client client, ClusterService clusterService) {
-        super(threadPool, client, clusterService);
+    ProfilingDataStreamManager(ThreadPool threadPool, Client client, ClusterService clusterService, IndexStateResolver indexStateResolver) {
+        super(threadPool, client, clusterService, indexStateResolver);
     }
 
     @Override
@@ -61,7 +61,7 @@ protected void onIndexState(
         IndexState<ProfilingDataStream> indexState,
         ActionListener<Void> listener
     ) {
-        Status status = indexState.getStatus();
+        IndexStatus status = indexState.getStatus();
         switch (status) {
             case NEEDS_CREATION -> createDataStream(indexState.getIndex(), listener);
             case NEEDS_VERSION_BUMP -> rolloverDataStream(indexState.getIndex(), listener);
@@ -74,22 +74,6 @@ protected void onIndexState(
         }
     }
 
-    protected IndexMetadata indexMetadata(ClusterState state, ProfilingDataStream dataStream) {
-        Map<String, DataStream> dataStreams = state.metadata().dataStreams();
-        if (dataStreams == null) {
-            return null;
-        }
-        DataStream ds = dataStreams.get(dataStream.getName());
-        if (ds == null) {
-            return null;
-        }
-        Index writeIndex = ds.getWriteIndex();
-        if (writeIndex == null) {
-            return null;
-        }
-        return state.metadata().index(writeIndex);
-    }
-
     @Override
     protected Iterable<ProfilingDataStream> getManagedIndices() {
         return PROFILING_DATASTREAMS;
@@ -185,7 +169,7 @@ public void onFailure(Exception e) {
 
     /**
      * A datastream that is used by Universal Profiling.
      */
-    static class ProfilingDataStream implements AbstractProfilingPersistenceManager.ProfilingIndexAbstraction {
+    static class ProfilingDataStream implements ProfilingIndexAbstraction {
         private final String name;
         private final int version;
         private final List<Migration> migrations;
@@ -226,6 +210,23 @@ public List<Migration> getMigrations(int currentIndexTemplateVersion) {
                 : Collections.emptyList();
         }
 
+        @Override
+        public IndexMetadata indexMetadata(ClusterState state) {
+            Map<String, DataStream> dataStreams = state.metadata().dataStreams();
+            if (dataStreams == null) {
+                return null;
+            }
+            DataStream ds = dataStreams.get(this.getName());
+            if (ds == null) {
+                return null;
+            }
+            Index writeIndex = ds.getWriteIndex();
+            if (writeIndex == null) {
+                return null;
+            }
+            return state.metadata().index(writeIndex);
+        }
+
         @Override
         public String toString() {
             return getName();
@@ -248,4 +249,22 @@ public int hashCode() {
             return Objects.hash(name, version);
         }
     }
+
+    public static boolean isAllResourcesCreated(ClusterState state, IndexStateResolver indexStateResolver) {
+        for (ProfilingDataStream profilingDataStream : PROFILING_DATASTREAMS) {
+            if (indexStateResolver.getIndexState(state, profilingDataStream).getStatus() != IndexStatus.UP_TO_DATE) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    public static boolean isAnyResourceTooOld(ClusterState state, IndexStateResolver indexStateResolver) {
+        for (ProfilingDataStream profilingDataStream : PROFILING_DATASTREAMS) {
+            if (indexStateResolver.getIndexState(state, profilingDataStream).getStatus() == IndexStatus.TOO_OLD) {
+                return true;
+            }
+        }
+        return false;
+    }
 }
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexAbstraction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexAbstraction.java
new file mode 100644
index 0000000000000..e89010cca353c
--- /dev/null
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexAbstraction.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.profiling;
+
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.metadata.IndexMetadata;
+
+import java.util.List;
+
+/**
+ * An index that is used by Universal Profiling.
+ */
+interface ProfilingIndexAbstraction {
+    String getName();
+
+    int getVersion();
+
+    List<Migration> getMigrations(int currentIndexTemplateVersion);
+
+    /**
+     * Extracts the appropriate index metadata for a given index from the cluster state.
+     *
+     * @param state Current cluster state. Never null.
+     * @return The corresponding index metadata or null if there are none.
+     */
+    IndexMetadata indexMetadata(ClusterState state);
+}
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexManager.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexManager.java
index fe5188ce7d020..746159c23dda0 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexManager.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexManager.java
@@ -39,7 +39,7 @@
 /**
  * Creates all indices that are required for using Elastic Universal Profiling.
  */
-public class ProfilingIndexManager extends AbstractProfilingPersistenceManager<ProfilingIndexManager.ProfilingIndex> {
+class ProfilingIndexManager extends AbstractProfilingPersistenceManager<ProfilingIndexManager.ProfilingIndex> {
     // For testing
     public static final List<ProfilingIndex> PROFILING_INDICES = List.of(
         ProfilingIndex.regular(
@@ -68,8 +68,8 @@ public class ProfilingIndexManager extends AbstractProfilingPersistenceManager<ProfilingIndexManager.ProfilingIndex> {
         )
     );
 
-    public ProfilingIndexManager(ThreadPool threadPool, Client client, ClusterService clusterService) {
-        super(threadPool, client, clusterService);
+    ProfilingIndexManager(ThreadPool threadPool, Client client, ClusterService clusterService, IndexStateResolver indexStateResolver) {
+        super(threadPool, client, clusterService, indexStateResolver);
     }
 
     @Override
     protected void onIndexState(
         ClusterState clusterState,
         IndexState<ProfilingIndex> indexState,
         ActionListener<Void> listener
     ) {
-        Status status = indexState.getStatus();
+        IndexStatus status = indexState.getStatus();
         switch (status) {
             case NEEDS_CREATION -> createIndex(clusterState, indexState.getIndex(), listener);
             case NEEDS_VERSION_BUMP -> bumpVersion(clusterState, indexState.getIndex(), listener);
@@ -91,38 +91,6 @@ protected void onIndexState(
         }
     }
 
-    @Override
-    protected IndexMetadata indexMetadata(ClusterState state, ProfilingIndex index) {
-        Map<String, IndexMetadata> indicesMetadata = state.metadata().indices();
-        if (indicesMetadata == null) {
-            return null;
-        }
-        IndexMetadata metadata = indicesMetadata.get(index.toString());
-        // prioritize the most recent generation from the current version
-        if (metadata == null && index.isKvIndex()) {
-            metadata = indicesMetadata.entrySet()
-                .stream()
-                .filter(e -> index.isMatchWithoutGeneration(e.getKey()))
-                // use the most recent index to make sure we use the most recent version info from the _meta field
-                .max(Comparator.comparingLong(e -> e.getValue().getCreationDate()))
-                .map(Map.Entry::getValue)
-                .orElse(null);
-        }
-
-        // attempt to find an index from an earlier generation
-        if (metadata == null) {
-            metadata = indicesMetadata.entrySet()
-                .stream()
-                .filter(e -> index.isMatchWithoutVersion(e.getKey()))
-                // use the most recent index to make sure we use the most recent version info from the _meta field
-                .max(Comparator.comparingLong(e -> e.getValue().getCreationDate()))
-                .map(Map.Entry::getValue)
-                .orElse(null);
-        }
-
-        return metadata;
-    }
-
     private void bumpVersion(ClusterState state, ProfilingIndex index, ActionListener<Void> listener) {
         if (index.getOnVersionBump() == OnVersionBump.DELETE_OLD) {
             Map<String, IndexMetadata> indicesMetadata = state.metadata().indices();
@@ -377,6 +345,38 @@ public List<Migration> getMigrations(int currentIndexTemplateVersion) {
                 : Collections.emptyList();
         }
 
+        @Override
+        public IndexMetadata indexMetadata(ClusterState state) {
+            Map<String, IndexMetadata> indicesMetadata = state.metadata().indices();
+            if (indicesMetadata == null) {
+                return null;
+            }
+            IndexMetadata metadata = indicesMetadata.get(this.toString());
+            // prioritize the most recent generation from the current version
+            if (metadata == null && isKvIndex()) {
+                metadata = indicesMetadata.entrySet()
+                    .stream()
+                    .filter(e -> isMatchWithoutGeneration(e.getKey()))
+                    // use the most recent index to make sure we use the most recent version info from the _meta field
+                    .max(Comparator.comparingLong(e -> e.getValue().getCreationDate()))
+                    .map(Map.Entry::getValue)
+                    .orElse(null);
+            }
+
+            // attempt to find an index from an earlier generation
+            if (metadata == null) {
+                metadata = indicesMetadata.entrySet()
+                    .stream()
+                    .filter(e -> isMatchWithoutVersion(e.getKey()))
+                    // use the most recent index to make sure we use the most recent version info from the _meta field
+                    .max(Comparator.comparingLong(e -> e.getValue().getCreationDate()))
+                    .map(Map.Entry::getValue)
+                    .orElse(null);
+            }
+
+            return metadata;
+        }
+
         public OnVersionBump getOnVersionBump() {
             return onVersionBump;
         }
@@ -410,4 +410,22 @@ public int hashCode() {
             return Objects.hash(namePrefix, version, generation, onVersionBump);
         }
     }
+
+    public static boolean isAllResourcesCreated(ClusterState state, IndexStateResolver indexStateResolver) {
+        for (ProfilingIndex profilingIndex : PROFILING_INDICES) {
+            if (indexStateResolver.getIndexState(state, profilingIndex).getStatus() != IndexStatus.UP_TO_DATE) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    public static boolean isAnyResourceTooOld(ClusterState state, IndexStateResolver indexStateResolver) {
+        for (ProfilingIndex profilingIndex : PROFILING_INDICES) {
+            if (indexStateResolver.getIndexState(state, profilingIndex).getStatus() == IndexStatus.TOO_OLD) {
+                return true;
+            }
+        }
+        return false;
+    }
 }
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java
index 2d5d633bdccfb..49e436ea4251b 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java
@@ -55,14 +55,22 @@ public class ProfilingPlugin extends Plugin implements ActionPlugin {
         Setting.Property.NodeScope,
         Setting.Property.Dynamic
     );
+
+    // *Internal* setting meant as an escape hatch if we need to skip the check for outdated indices for some reason.
+    public static final Setting<Boolean> PROFILING_CHECK_OUTDATED_INDICES = Setting.boolSetting(
+        "xpack.profiling.check_outdated_indices",
+        true,
+        Setting.Property.NodeScope,
+        Setting.Property.Dynamic
+    );
 
     public static final String PROFILING_THREAD_POOL_NAME = "profiling";
 
     private final Settings settings;
     private final boolean enabled;
     private final SetOnce<ProfilingIndexTemplateRegistry> registry = new SetOnce<>();
     private final SetOnce<ProfilingIndexManager> indexManager = new SetOnce<>();
     private final SetOnce<ProfilingDataStreamManager> dataStreamManager = new SetOnce<>();
+    private final SetOnce<IndexStateResolver> indexStateResolver = new SetOnce<>();
 
     public ProfilingPlugin(Settings settings) {
         this.settings = settings;
@@ -88,8 +96,11 @@ public Collection<Object> createComponents(
     ) {
         logger.info("Profiling is {}", enabled ? "enabled" : "disabled");
         registry.set(new ProfilingIndexTemplateRegistry(settings, clusterService, threadPool, client, xContentRegistry));
-        indexManager.set(new ProfilingIndexManager(threadPool, client, clusterService));
-        dataStreamManager.set(new ProfilingDataStreamManager(threadPool, client, clusterService));
+        indexStateResolver.set(new IndexStateResolver(PROFILING_CHECK_OUTDATED_INDICES.get(settings)));
+        clusterService.getClusterSettings().addSettingsUpdateConsumer(PROFILING_CHECK_OUTDATED_INDICES, this::updateCheckOutdatedIndices);
+
+        indexManager.set(new ProfilingIndexManager(threadPool, client, clusterService, indexStateResolver.get()));
+        dataStreamManager.set(new ProfilingDataStreamManager(threadPool, client, clusterService, indexStateResolver.get()));
         // set initial value
         updateTemplatesEnabled(PROFILING_TEMPLATES_ENABLED.get(settings));
         clusterService.getClusterSettings().addSettingsUpdateConsumer(PROFILING_TEMPLATES_ENABLED, this::updateTemplatesEnabled);
@@ -103,6 +114,13 @@ public Collection<Object> createComponents(
         }
     }
 
+    public void updateCheckOutdatedIndices(boolean newValue) {
+        if (newValue == false) {
+            logger.info("profiling will ignore outdated indices");
+        }
+        indexStateResolver.get().setCheckOutdatedIndices(newValue);
+    }
+
     public void updateTemplatesEnabled(boolean newValue) {
         if (newValue == false) {
             logger.info("profiling index templates will not be installed or reinstalled");
@@ -134,6 +152,7 @@ public List<RestHandler> getRestHandlers(
     public List<Setting<?>> getSettings() {
         return List.of(
             PROFILING_TEMPLATES_ENABLED,
+            PROFILING_CHECK_OUTDATED_INDICES,
             TransportGetStackTracesAction.PROFILING_MAX_STACKTRACE_QUERY_SLICES,
             TransportGetStackTracesAction.PROFILING_MAX_DETAIL_QUERY_SLICES,
             TransportGetStackTracesAction.PROFILING_QUERY_REALTIME
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStatusAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStatusAction.java
index 2ea1799bc60e6..abac8971596a1 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStatusAction.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStatusAction.java
@@ -24,7 +24,6 @@
 import org.elasticsearch.xpack.core.XPackSettings;
 
 public class TransportGetStatusAction extends TransportMasterNodeAction<GetStatusAction.Request, GetStatusAction.Response> {
-
     @Inject
     public TransportGetStatusAction(
         TransportService transportService,
@@ -53,10 +52,20 @@ protected void masterOperation(
         ClusterState state,
         ActionListener<GetStatusAction.Response> listener
     ) {
+        IndexStateResolver indexStateResolver = new IndexStateResolver(getValue(state, ProfilingPlugin.PROFILING_CHECK_OUTDATED_INDICES));
+
         boolean pluginEnabled = getValue(state, XPackSettings.PROFILING_ENABLED);
         boolean resourceManagementEnabled = getValue(state, ProfilingPlugin.PROFILING_TEMPLATES_ENABLED);
-        boolean resourcesCreated = ProfilingIndexTemplateRegistry.isAllResourcesCreated(state, clusterService.getSettings());
-        listener.onResponse(new GetStatusAction.Response(pluginEnabled, resourceManagementEnabled, resourcesCreated));
+
+        boolean templatesCreated = ProfilingIndexTemplateRegistry.isAllResourcesCreated(state, clusterService.getSettings());
+        boolean indicesCreated = ProfilingIndexManager.isAllResourcesCreated(state, indexStateResolver);
+        boolean dataStreamsCreated = ProfilingDataStreamManager.isAllResourcesCreated(state, indexStateResolver);
+        boolean resourcesCreated = templatesCreated && indicesCreated && dataStreamsCreated;
+
+        boolean indicesPre891 = ProfilingIndexManager.isAnyResourceTooOld(state, indexStateResolver);
+        boolean dataStreamsPre891 = ProfilingDataStreamManager.isAnyResourceTooOld(state, indexStateResolver);
+        boolean anyPre891Data = indicesPre891 || dataStreamsPre891;
+        listener.onResponse(new GetStatusAction.Response(pluginEnabled, resourceManagementEnabled, resourcesCreated, anyPre891Data));
     }
 
     private boolean getValue(ClusterState state, Setting<Boolean> setting) {
diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManagerTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManagerTests.java
index 90acff05798de..0b762b5eb45da 100644
--- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManagerTests.java
+++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManagerTests.java
@@ -48,6 +48,7 @@
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.threadpool.TestThreadPool;
 import org.elasticsearch.threadpool.ThreadPool;
+import org.hamcrest.Matchers;
 import org.junit.After;
 import org.junit.Before;
 
@@ -60,7 +61,9 @@
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 
+import static org.hamcrest.Matchers.allOf;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 import static org.hamcrest.Matchers.instanceOf;
 import static org.mockito.Mockito.spy;
 import static org.mockito.Mockito.when;
@@ -73,6 +76,7 @@ public class ProfilingDataStreamManagerTests extends ESTestCase {
     private VerifyingClient client;
     private List<ProfilingDataStreamManager.ProfilingDataStream> managedDataStreams;
     private int indexTemplateVersion;
+    private IndexStateResolver indexStateResolver;
 
     @Before
     public void createRegistryAndClient() {
@@ -82,16 +86,17 @@ public void createRegistryAndClient() {
         clusterService = ClusterServiceUtils.createClusterService(threadPool);
         managedDataStreams = ProfilingDataStreamManager.PROFILING_DATASTREAMS;
         indexTemplateVersion = ProfilingIndexTemplateRegistry.INDEX_TEMPLATE_VERSION;
-        datastreamManager = new ProfilingDataStreamManager(threadPool, client, clusterService) {
-            @Override
-            protected boolean isAllResourcesCreated(ClusterChangedEvent event, Settings settings) {
-                return templatesCreated.get();
-            }
-
+        indexStateResolver = new IndexStateResolver(true) {
             @Override
             protected int getIndexTemplateVersion() {
                 return indexTemplateVersion;
             }
+        };
+        datastreamManager = new ProfilingDataStreamManager(threadPool, client, clusterService, indexStateResolver) {
+            @Override
+            protected boolean areAllIndexTemplatesCreated(ClusterChangedEvent event, Settings settings) {
+                return templatesCreated.get();
+            }
 
             @Override
             protected Iterable<ProfilingDataStreamManager.ProfilingDataStream> getManagedIndices() {
@@ -161,6 +166,7 @@ public void testThatRedIndexIsNotTouched() throws Exception {
             List.of(existingDataStream.withVersion(0)),
             nodes,
             IndexMetadata.State.OPEN,
+            IndexVersion.current(),
             false
         );
 
@@ -174,6 +180,62 @@ public void testThatRedIndexIsNotTouched() throws Exception {
         calledTimes.set(0);
     }
 
+    public void testThatOutdatedDataStreamIsDetectedIfCheckEnabled() throws Exception {
+        DiscoveryNode node = DiscoveryNodeUtils.create("node");
+        DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build();
+        templatesCreated.set(true);
+
+        ProfilingDataStreamManager.ProfilingDataStream existingDataStream = randomFrom(ProfilingDataStreamManager.PROFILING_DATASTREAMS);
+        ClusterChangedEvent event = createClusterChangedEvent(
+            List.of(existingDataStream),
+            nodes,
+            IndexMetadata.State.OPEN,
+            // This is an outdated version that requires indices to be deleted upon migration
+            IndexVersion.V_8_8_2,
+            true
+        );
+
+        AtomicInteger calledTimes = new AtomicInteger(0);
+
+        client.setVerifier((action, request, listener) -> verifyDataStreamInstalled(calledTimes, action, request, listener));
+        datastreamManager.clusterChanged(event);
+        // should not create this index because the one that has changed is too old. Depending on the point at which the index is
+        // evaluated, other indices may have already been created.
+        assertBusy(
+            () -> assertThat(
+                calledTimes.get(),
+                allOf(greaterThanOrEqualTo(0), Matchers.lessThan(ProfilingDataStreamManager.PROFILING_DATASTREAMS.size()))
+            )
+        );
+        calledTimes.set(0);
+    }
+
+    public void testThatOutdatedDataStreamIsIgnoredIfCheckDisabled() throws Exception {
+        // disable the check
+        indexStateResolver.setCheckOutdatedIndices(false);
+
+        DiscoveryNode node = DiscoveryNodeUtils.create("node");
+        DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build();
+        templatesCreated.set(true);
+
+        ProfilingDataStreamManager.ProfilingDataStream existingDataStream = randomFrom(ProfilingDataStreamManager.PROFILING_DATASTREAMS);
+        ClusterChangedEvent event = createClusterChangedEvent(
+            List.of(existingDataStream),
+            nodes,
+            IndexMetadata.State.OPEN,
+            IndexVersion.V_8_8_2,
+            true
+        );
+
+        AtomicInteger calledTimes = new AtomicInteger(0);
+
+        client.setVerifier((action, request, listener) -> verifyDataStreamInstalled(calledTimes, action, request, listener));
+        datastreamManager.clusterChanged(event);
+        // should create all indices but consider the current one up-to-date
+        assertBusy(() -> assertThat(calledTimes.get(), equalTo(ProfilingDataStreamManager.PROFILING_DATASTREAMS.size() - 1)));
+        calledTimes.set(0);
+    }
+
     public void testThatClosedIndexIsNotTouched() throws Exception {
         DiscoveryNode node = DiscoveryNodeUtils.create("node");
         DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build();
@@ -185,6 +247,7 @@ public void testThatClosedIndexIsNotTouched() throws Exception {
             List.of(existingDataStream.withVersion(0)),
             nodes,
             IndexMetadata.State.CLOSE,
+            IndexVersion.current(),
             true
         );
 
@@ -379,16 +442,17 @@ private ClusterChangedEvent createClusterChangedEvent(
         Iterable<ProfilingDataStreamManager.ProfilingDataStream> existingDataStreams,
         DiscoveryNodes nodes
     ) {
-        return createClusterChangedEvent(existingDataStreams, nodes, IndexMetadata.State.OPEN, true);
+        return createClusterChangedEvent(existingDataStreams, nodes, IndexMetadata.State.OPEN, IndexVersion.current(), true);
     }
 
     private ClusterChangedEvent createClusterChangedEvent(
         Iterable<ProfilingDataStreamManager.ProfilingDataStream> existingDataStreams,
         DiscoveryNodes nodes,
         IndexMetadata.State state,
+        IndexVersion indexVersion,
         boolean allShardsAssigned
     ) {
-        ClusterState cs = createClusterState(Settings.EMPTY, existingDataStreams, nodes, state, allShardsAssigned);
+        ClusterState cs = createClusterState(Settings.EMPTY, existingDataStreams, nodes, state, indexVersion, allShardsAssigned);
         ClusterChangedEvent realEvent = new ClusterChangedEvent(
             "created-from-test",
             cs,
@@ -405,6 +469,7 @@ private ClusterState createClusterState(
         Iterable<ProfilingDataStreamManager.ProfilingDataStream> existingDataStreams,
         DiscoveryNodes nodes,
         IndexMetadata.State state,
+        IndexVersion indexVersion,
         boolean allShardsAssigned
     ) {
         Metadata.Builder metadataBuilder = Metadata.builder();
@@ -427,7 +492,7 @@ private ClusterState createClusterState(
             metadataBuilder.put(ds);
             IndexMetadata.Builder builder = new IndexMetadata.Builder(writeIndexName);
             builder.state(state);
-            builder.settings(indexSettings(IndexVersion.current(), 1, 1).put(IndexMetadata.SETTING_INDEX_UUID, writeIndex.getUUID()));
+            builder.settings(indexSettings(indexVersion, 1, 1).put(IndexMetadata.SETTING_INDEX_UUID, writeIndex.getUUID()));
             builder.putMapping(
                 new MappingMetadata(
                     MapperService.SINGLE_MAPPING_NAME,
diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java
index 8600a4fcc70b3..3efd1d4c041f5 100644
--- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java
+++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java
@@ -49,6 +49,7 @@
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.threadpool.TestThreadPool;
 import org.elasticsearch.threadpool.ThreadPool;
+import org.hamcrest.Matchers;
 import org.junit.After;
 import org.junit.Before;
 
@@ -60,7 +61,9 @@
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 
+import static org.hamcrest.Matchers.allOf;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 import static org.hamcrest.Matchers.instanceOf;
 import static org.mockito.Mockito.spy;
 import static org.mockito.Mockito.when;
@@ -73,6 +76,7 @@ public class ProfilingIndexManagerTests extends ESTestCase {
     private VerifyingClient client;
     private List<ProfilingIndexManager.ProfilingIndex> managedIndices;
     private int indexTemplateVersion;
+    private IndexStateResolver indexStateResolver;
 
     @Before
     public void createRegistryAndClient() {
@@ -82,9 +86,15 @@ public void createRegistryAndClient() {
         clusterService = ClusterServiceUtils.createClusterService(threadPool);
         managedIndices = ProfilingIndexManager.PROFILING_INDICES;
         indexTemplateVersion = ProfilingIndexTemplateRegistry.INDEX_TEMPLATE_VERSION;
-        indexManager = new ProfilingIndexManager(threadPool, client, clusterService) {
+        indexStateResolver = new IndexStateResolver(true) {
             @Override
-            protected boolean isAllResourcesCreated(ClusterChangedEvent event, Settings settings) {
+            protected int getIndexTemplateVersion() {
+                return indexTemplateVersion;
+            }
+        };
+        indexManager = new ProfilingIndexManager(threadPool, client, clusterService, indexStateResolver) {
+            @Override
+            protected boolean areAllIndexTemplatesCreated(ClusterChangedEvent event, Settings settings) {
                 return templatesCreated.get();
             }
 
@@ -92,11 +102,6 @@ protected boolean isAllResourcesCreated(ClusterChangedEvent event, Settings sett
             protected Iterable<ProfilingIndexManager.ProfilingIndex> getManagedIndices() {
                 return managedIndices;
             }
-
-            @Override
-            protected int getIndexTemplateVersion() {
-                return indexTemplateVersion;
-            }
         };
         indexManager.setTemplatesEnabled(true);
     }
@@ -161,6 +166,7 @@ public void testThatRedIndexIsNotTouched() throws Exception {
             List.of(existingIndex.withVersion(0)),
             nodes,
             IndexMetadata.State.OPEN,
+            IndexVersion.current(),
             false
         );
 
@@ -174,6 +180,62 @@ public void testThatRedIndexIsNotTouched() throws Exception {
         calledTimes.set(0);
     }
 
+    public void testThatOutdatedIndexIsDetectedIfCheckEnabled() throws Exception {
+        DiscoveryNode node = DiscoveryNodeUtils.create("node");
+        DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build();
+        templatesCreated.set(true);
+
+        ProfilingIndexManager.ProfilingIndex existingIndex = randomFrom(ProfilingIndexManager.PROFILING_INDICES);
+        ClusterChangedEvent event = createClusterChangedEvent(
+            List.of(existingIndex.withVersion(0)),
+            nodes,
+            IndexMetadata.State.OPEN,
+            // This is an outdated version that requires indices to be deleted upon migration
+            IndexVersion.V_8_8_2,
+            true
+        );
+
+        AtomicInteger calledTimes = new AtomicInteger(0);
+
+        client.setVerifier((action, request, listener) -> verifyIndexInstalled(calledTimes, action, request, listener));
+        indexManager.clusterChanged(event);
+        // should not create this index because the one that has changed is too old. Depending on the point at which the index is
+        // evaluated, other indices may have already been created.
+        assertBusy(
+            () -> assertThat(
+                calledTimes.get(),
+                allOf(greaterThanOrEqualTo(0), Matchers.lessThan(ProfilingIndexManager.PROFILING_INDICES.size()))
+            )
+        );
+        calledTimes.set(0);
+    }
+
+    public void testThatOutdatedIndexIsIgnoredIfCheckDisabled() throws Exception {
+        // disable the check
+        indexStateResolver.setCheckOutdatedIndices(false);
+
+        DiscoveryNode node = DiscoveryNodeUtils.create("node");
+        DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build();
+        templatesCreated.set(true);
+
+        ProfilingIndexManager.ProfilingIndex existingIndex = randomFrom(ProfilingIndexManager.PROFILING_INDICES);
+        ClusterChangedEvent event = createClusterChangedEvent(
+            List.of(existingIndex),
+            nodes,
+            IndexMetadata.State.OPEN,
+            IndexVersion.V_8_8_2,
+            true
+        );
+
+        AtomicInteger calledTimes = new AtomicInteger(0);
+
+        client.setVerifier((action, request, listener) -> verifyIndexInstalled(calledTimes, action, request, listener));
+        indexManager.clusterChanged(event);
+        // should create all indices but consider the current one up-to-date
+        assertBusy(() -> assertThat(calledTimes.get(), equalTo(ProfilingIndexManager.PROFILING_INDICES.size() - 1)));
+        calledTimes.set(0);
+    }
+
     public void testThatClosedIndexIsNotTouched() throws Exception {
         DiscoveryNode node = DiscoveryNodeUtils.create("node");
         DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build();
@@ -185,6 +247,7 @@ public void testThatClosedIndexIsNotTouched() throws Exception {
             List.of(existingIndex.withVersion(0)),
             nodes,
             IndexMetadata.State.CLOSE,
+            IndexVersion.current(),
             true
         );
 
@@ -401,16 +464,17 @@ private ClusterChangedEvent createClusterChangedEvent(
         Iterable<ProfilingIndexManager.ProfilingIndex> existingIndices,
         DiscoveryNodes nodes
     ) {
-        return createClusterChangedEvent(existingIndices, nodes, IndexMetadata.State.OPEN, true);
+        return createClusterChangedEvent(existingIndices, nodes, IndexMetadata.State.OPEN, IndexVersion.current(), true);
     }
 
     private ClusterChangedEvent createClusterChangedEvent(
         Iterable<ProfilingIndexManager.ProfilingIndex> existingIndices,
         DiscoveryNodes nodes,
         IndexMetadata.State state,
+        IndexVersion indexVersion,
         boolean allShardsAssigned
     ) {
-        ClusterState cs = createClusterState(Settings.EMPTY, existingIndices, nodes, state, allShardsAssigned);
+        ClusterState cs = createClusterState(Settings.EMPTY, existingIndices, nodes, state, indexVersion, allShardsAssigned);
         ClusterChangedEvent realEvent = new ClusterChangedEvent(
             "created-from-test",
             cs,
@@ -427,6 +491,7 @@ private ClusterState createClusterState(
         Iterable<ProfilingIndexManager.ProfilingIndex> existingIndices,
         DiscoveryNodes nodes,
         IndexMetadata.State state,
+        IndexVersion indexVersion,
         boolean allShardsAssigned
     ) {
         RoutingTable.Builder routingTableBuilder = RoutingTable.builder();
@@ -436,7 +501,7 @@ private ClusterState createClusterState(
             Index index = new Index(indexName, indexName);
             IndexMetadata.Builder builder = new IndexMetadata.Builder(indexName);
             builder.state(state);
-            builder.settings(indexSettings(IndexVersion.current(), 1, 1).put(IndexMetadata.SETTING_INDEX_UUID, index.getUUID()));
+            builder.settings(indexSettings(indexVersion, 1, 1).put(IndexMetadata.SETTING_INDEX_UUID, index.getUUID()));
             builder.putMapping(
                 new MappingMetadata(
                     MapperService.SINGLE_MAPPING_NAME,
diff --git
a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java
index 7b7585cacf733..0cc37f7ed3945 100644
--- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java
+++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java
@@ -14,7 +14,6 @@
 import org.apache.lucene.document.TextField;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.Term;
-import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
@@ -1309,12 +1308,11 @@ private InternalAggregation doQuery(
         indexWriter.close();
 
         DirectoryReader indexReader = DirectoryReader.open(directory);
-        IndexSearcher indexSearcher = newIndexSearcher(indexReader);
 
-        try (AggregationContext context = createAggregationContext(indexSearcher, query, fieldType)) {
+        try (AggregationContext context = createAggregationContext(indexReader, query, fieldType)) {
             Aggregator aggregator = createAggregator(aggBuilder, context);
             aggregator.preCollection();
-            indexSearcher.search(query, aggregator.asCollector());
+            context.searcher().search(query, aggregator.asCollector());
             aggregator.postCollection();
             return aggregator.buildTopLevel();
         } finally {
diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java
index 5d8aa420a9d74..d680752efc498 100644
--- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java
+++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java
@@ -11,7 +11,6 @@
 import org.apache.lucene.document.SortedNumericDocValuesField;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.tests.index.RandomIndexWriter;
 import org.elasticsearch.action.index.IndexRequest;
@@ -89,7 +88,6 @@ public void testMissingFields() throws IOException {
         indexWriter.close();
 
         DirectoryReader indexReader = DirectoryReader.open(directory);
-        IndexSearcher indexSearcher = newIndexSearcher(indexReader);
 
         DateFieldMapper.DateFieldType timestampFieldType = new DateFieldMapper.DateFieldType(timestampField);
         MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType(valueField, NumberFieldMapper.NumberType.LONG);
@@ -108,7 +106,7 @@ public void testMissingFields() throws IOException {
         metricAgg.forEach(compositeBuilder::subAggregation);
 
         CompositeAggregation composite = searchAndReduce(
-            indexSearcher,
+            indexReader,
             new AggTestConfig(compositeBuilder, timestampFieldType, valueFieldType)
         );
         indexReader.close();
@@ -148,7 +146,6 @@ public void testCorrectFields() throws IOException {
         indexWriter.close();
 
         DirectoryReader indexReader = DirectoryReader.open(directory);
-        IndexSearcher indexSearcher = newIndexSearcher(indexReader);
 
         DateFieldMapper.DateFieldType timestampFieldType = new DateFieldMapper.DateFieldType(timestampField);
         MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType(valueField, NumberFieldMapper.NumberType.LONG);
@@ -169,7 +166,7 @@
         metricAgg.forEach(compositeBuilder::subAggregation);
 
         CompositeAggregation composite = searchAndReduce(
-            indexSearcher,
+            indexReader,
             new AggTestConfig(compositeBuilder, timestampFieldType, valueFieldType)
         );
         indexReader.close();
@@ -206,7 +203,6 @@ public void testNumericTerms() throws IOException {
         indexWriter.close();
 
         DirectoryReader indexReader = DirectoryReader.open(directory);
-        IndexSearcher indexSearcher = newIndexSearcher(indexReader);
 
         MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType(valueField, NumberFieldMapper.NumberType.LONG);
 
@@ -221,7 +217,7 @@ public void testNumericTerms() throws IOException {
         List<AggregationBuilder> metricAgg = createAggregationBuilders(singletonList(metricConfig));
         metricAgg.forEach(compositeBuilder::subAggregation);
 
-        CompositeAggregation composite = searchAndReduce(indexSearcher, new AggTestConfig(compositeBuilder, valueFieldType));
+        CompositeAggregation composite = searchAndReduce(indexReader, new AggTestConfig(compositeBuilder, valueFieldType));
         indexReader.close();
         directory.close();
 
@@ -259,7 +255,6 @@ public void testEmptyCounts() throws IOException {
         indexWriter.close();
 
         DirectoryReader indexReader = DirectoryReader.open(directory);
-        IndexSearcher indexSearcher = newIndexSearcher(indexReader);
 
         DateFieldMapper.DateFieldType timestampFieldType = new DateFieldMapper.DateFieldType(timestampField);
         MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType(valueField, NumberFieldMapper.NumberType.LONG);
@@ -279,7 +274,7 @@ public void testEmptyCounts() throws IOException {
         metricAgg.forEach(compositeBuilder::subAggregation);
 
         CompositeAggregation composite = searchAndReduce(
-            indexSearcher,
+            indexReader,
             new AggTestConfig(compositeBuilder, timestampFieldType, valueFieldType)
         );
         indexReader.close();
@@ -449,7 +444,6 @@ public void testMissingBuckets() throws IOException {
         indexWriter.close();
 
         DirectoryReader indexReader = DirectoryReader.open(directory);
-        IndexSearcher indexSearcher = newIndexSearcher(indexReader);
 
         MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType(valueField, NumberFieldMapper.NumberType.LONG);
         MappedFieldType metricFieldType = new NumberFieldMapper.NumberFieldType(metricField, NumberFieldMapper.NumberType.LONG);
@@ -465,10 +459,7 @@ public void testMissingBuckets() throws IOException {
         List<AggregationBuilder> metricAgg = createAggregationBuilders(singletonList(metricConfig));
         metricAgg.forEach(compositeBuilder::subAggregation);
 
-        CompositeAggregation composite = searchAndReduce(
-            indexSearcher,
-            new AggTestConfig(compositeBuilder, valueFieldType, metricFieldType)
-        );
+        CompositeAggregation composite = searchAndReduce(indexReader, new AggTestConfig(compositeBuilder, valueFieldType, metricFieldType));
         indexReader.close();
         directory.close();
 
@@ -516,7 +507,6 @@ public void testTimezone() throws IOException {
         indexWriter.close();
 
         DirectoryReader indexReader = DirectoryReader.open(directory);
-        IndexSearcher indexSearcher = newIndexSearcher(indexReader);
 
         DateFieldMapper.DateFieldType timestampFieldType = new DateFieldMapper.DateFieldType(timestampField);
         MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType(valueField, NumberFieldMapper.NumberType.LONG);
@@ -539,7 +529,7 @@
         metricAgg.forEach(compositeBuilder::subAggregation);
 
         CompositeAggregation composite = searchAndReduce(
-            indexSearcher,
+            indexReader,
             new AggTestConfig(compositeBuilder, timestampFieldType, valueFieldType)
         );
         indexReader.close();
diff --git
a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java
index c0e1053d008e8..d2b1a1d7e6772 100644
--- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java
+++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java
@@ -13,7 +13,6 @@
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.tests.analysis.MockAnalyzer;
@@ -682,7 +681,6 @@ private void executeTestCase(
         Map<String, MappedFieldType> fieldTypeLookup = createFieldTypes(config);
         Directory dir = index(docs, fieldTypeLookup);
         IndexReader reader = DirectoryReader.open(dir);
-        IndexSearcher searcher = newSearcher(reader);
         String dateHistoField = config.getGroupConfig().getDateHistogram().getField();
         final ThreadPool threadPool = new TestThreadPool(getTestName());
@@ -691,7 +689,7 @@
         final SyncRollupIndexer action = new SyncRollupIndexer(
             threadPool,
             job,
-            searcher,
+            reader,
             fieldTypeLookup.values().toArray(new MappedFieldType[0]),
             fieldTypeLookup.get(dateHistoField)
         );
@@ -792,7 +790,7 @@ private Directory index(List<Map<String, Object>> docs, Map<String, MappedFieldType> fieldTypeLookup)
 
     class SyncRollupIndexer extends RollupIndexer {
-        private final IndexSearcher searcher;
+        private final IndexReader reader;
         private final MappedFieldType[] fieldTypes;
         private final MappedFieldType timestampField;
         private final List<Document> documents = new ArrayList<>();
@@ -802,12 +800,12 @@ class SyncRollupIndexer extends RollupIndexer {
         SyncRollupIndexer(
             ThreadPool threadPool,
             RollupJob job,
-            IndexSearcher searcher,
+            IndexReader reader,
             MappedFieldType[] fieldTypes,
             MappedFieldType timestampField
         ) {
             super(threadPool, job, new AtomicReference<>(IndexerState.STARTED), null);
-            this.searcher = searcher;
+            this.reader = reader;
             this.fieldTypes = fieldTypes;
             this.timestampField = timestampField;
         }
@@ -863,7 +861,7 @@ protected void doNextSearch(long waitTimeInNanos, ActionListener<SearchResponse> nextPhase)
             CompositeAggregation result = null;
             try {
-                result = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldTypes).withQuery(query));
+                result = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldTypes).withQuery(query));
             } catch (IOException e) {
                 listener.onFailure(e);
             }
diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityApiKeyRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityApiKeyRestIT.java
index f7b8615413db4..1de322fef27de 100644
--- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityApiKeyRestIT.java
+++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityApiKeyRestIT.java
@@ -328,7 +328,7 @@ public void testCrossClusterSearchWithApiKey() throws Exception {
                 () -> performRequestWithApiKey(new Request("GET", "/invalid_remote:index1/_search"), apiKeyEncoded)
             );
             assertThat(exception4.getResponse().getStatusLine().getStatusCode(), equalTo(401));
-            assertThat(exception4.getMessage(), containsString("unable to authenticate user "));
+            assertThat(exception4.getMessage(), containsString("unable to find apikey"));
         }
     }
 
diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java
b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java index 6f1993d9b31e1..40bbc82afd211 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java @@ -37,7 +37,6 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; -import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -339,10 +338,7 @@ public void testCrossClusterSearch() throws Exception { () -> performRequestWithRemoteSearchUser(new Request("GET", "/invalid_remote:index1/_search")) ); assertThat(exception4.getResponse().getStatusLine().getStatusCode(), equalTo(401)); - assertThat( - exception4.getMessage(), - allOf(containsString("unable to authenticate user "), containsString("unable to find apikey")) - ); + assertThat(exception4.getMessage(), containsString("unable to find apikey")); // check that REST API key is not supported by cross cluster access updateClusterSettings( diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java index 1492dcaf687dc..ea9e7059c7ea8 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java @@ -19,6 +19,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkService; @@ -26,10 +27,13 @@ import org.elasticsearch.common.ssl.SslConfiguration; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.ConnectionProfile; +import org.elasticsearch.transport.Header; +import org.elasticsearch.transport.HeaderValidationException; import org.elasticsearch.transport.InboundAggregator; import org.elasticsearch.transport.InboundDecoder; import org.elasticsearch.transport.InboundPipeline; @@ -42,6 +46,7 @@ import org.elasticsearch.xpack.core.security.transport.ProfileConfigurations; import org.elasticsearch.xpack.core.security.transport.SecurityTransportExceptionHandler; import org.elasticsearch.xpack.core.ssl.SSLService; +import org.elasticsearch.xpack.security.authc.CrossClusterAccessAuthenticationService; import java.net.InetSocketAddress; import java.net.SocketAddress; @@ -53,6 +58,7 @@ import javax.net.ssl.SSLEngine; import javax.net.ssl.SSLParameters; +import static org.elasticsearch.transport.InboundDecoder.ChannelType.SERVER; 
import static org.elasticsearch.transport.RemoteClusterPortSettings.REMOTE_CLUSTER_PROFILE; import static org.elasticsearch.xpack.core.XPackSettings.REMOTE_CLUSTER_CLIENT_SSL_ENABLED; import static org.elasticsearch.xpack.core.XPackSettings.REMOTE_CLUSTER_CLIENT_SSL_PREFIX; @@ -72,6 +78,7 @@ public class SecurityNetty4Transport extends Netty4Transport { private final boolean remoteClusterServerSslEnabled; private final SslConfiguration remoteClusterClientSslConfiguration; private final RemoteClusterClientBootstrapOptions remoteClusterClientBootstrapOptions; + private final CrossClusterAccessAuthenticationService crossClusterAccessAuthenticationService; public SecurityNetty4Transport( final Settings settings, @@ -82,7 +89,8 @@ public SecurityNetty4Transport( final NamedWriteableRegistry namedWriteableRegistry, final CircuitBreakerService circuitBreakerService, final SSLService sslService, - final SharedGroupFactory sharedGroupFactory + final SharedGroupFactory sharedGroupFactory, + final CrossClusterAccessAuthenticationService crossClusterAccessAuthenticationService ) { super( settings, @@ -94,6 +102,7 @@ public SecurityNetty4Transport( circuitBreakerService, sharedGroupFactory ); + this.crossClusterAccessAuthenticationService = crossClusterAccessAuthenticationService; this.exceptionHandler = new SecurityTransportExceptionHandler(logger, lifecycle, (c, e) -> super.onException(c, e)); this.sslService = sslService; this.transportSslEnabled = XPackSettings.TRANSPORT_SSL_ENABLED.get(settings); @@ -150,16 +159,51 @@ protected ChannelHandler getClientChannelInitializer(DiscoveryNode node, Connect } @Override - protected InboundPipeline getInboundPipeline(boolean isRemoteClusterServerChannel) { - return new InboundPipeline( - getStatsTracker(), - threadPool::relativeTimeInMillis, - isRemoteClusterServerChannel - ? 
new InboundDecoder(recycler, RemoteClusterPortSettings.MAX_REQUEST_HEADER_SIZE.get(settings)) - : new InboundDecoder(recycler), - new InboundAggregator(getInflightBreaker(), getRequestHandlers()::getHandler, ignoreDeserializationErrors()), - this::inboundMessage - ); + protected InboundPipeline getInboundPipeline(Channel channel, boolean isRemoteClusterServerChannel) { + if (false == isRemoteClusterServerChannel) { + return super.getInboundPipeline(channel, false); + } else { + return new InboundPipeline( + getStatsTracker(), + threadPool::relativeTimeInMillis, + new InboundDecoder(recycler, RemoteClusterPortSettings.MAX_REQUEST_HEADER_SIZE.get(settings), SERVER), + new InboundAggregator(getInflightBreaker(), getRequestHandlers()::getHandler, ignoreDeserializationErrors()), + this::inboundMessage + ) { + @Override + protected void headerReceived(Header header) { + if (header.isHandshake() == false) { + // eagerly (before buffering the full request) authenticate all request headers for this type of channel + assert header.isRequest(); + // authn is mostly async, avoid buffering anymore data while authn is in progress + channel.config().setAutoRead(false); + // this prevents thread-context changes to propagate beyond the validation, as netty worker threads are reused + try (ThreadContext.StoredContext ignore = threadPool.getThreadContext().newStoredContext()) { + crossClusterAccessAuthenticationService.tryAuthenticate( + header.getRequestHeaders(), + ActionListener.runAfter(ActionListener.wrap(aVoid -> { + // authn is successful -> NOOP (the complete request will be subsequently authn & authz & audited) + // Header#toString does not print credentials (which are stored in request headers) + logger.debug("Transport CCS authentication SUCCESS for [{}] on channel [{}]", header, channel); + }, e -> { + // Header#toString does not print credentials (which are stored in request headers) + logger.debug( + "Transport CCS authentication FAIL for [{}] with [{}], closing channel [{}]", + header, + e.getMessage(), + channel + ); + channel.eventLoop() + .submit(() -> channel.pipeline().fireExceptionCaught(new HeaderValidationException(header, e))); + }), () -> channel.config().setAutoRead(true)) + ); + } + } + // go on with the message parts + super.headerReceived(header); + } + }; + } } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 9cf2693fd0c42..bea99680c8611 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -1623,7 +1623,8 @@ public Map> getTransports( circuitBreakerService, ipFilter, getSslService(), - getNettySharedGroupFactory(settings) + getNettySharedGroupFactory(settings), + crossClusterAccessAuthcService.get() ) ); return transportReference.get(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationService.java index 0e817d55e5be4..866bac68c33dd 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationService.java @@ -112,6 +112,17 
@@ public void authenticate(final String action, final TransportRequest request, fi } } + public void tryAuthenticate(Map<String, String> headers, ActionListener<Void> listener) { + final ApiKeyService.ApiKeyCredentials credentials; + try { + credentials = extractApiKeyCredentialsFromHeaders(headers); + } catch (Exception e) { + listener.onFailure(e); + return; + } + tryAuthenticate(credentials, listener); + } + public void tryAuthenticate(ApiKeyService.ApiKeyCredentials credentials, ActionListener<Void> listener) { Objects.requireNonNull(credentials); apiKeyService.tryAuthenticate(clusterService.threadPool().getThreadContext(), credentials, ActionListener.wrap(authResult -> { @@ -146,7 +157,7 @@ public void tryAuthenticate(ApiKeyService.ApiKeyCredentials credentials, ActionL public ApiKeyService.ApiKeyCredentials extractApiKeyCredentialsFromHeaders(Map<String, String> headers) { try { apiKeyService.ensureEnabled(); - final String credentials = headers.get(CROSS_CLUSTER_ACCESS_CREDENTIALS_HEADER_KEY); + final String credentials = headers == null ? null : headers.get(CROSS_CLUSTER_ACCESS_CREDENTIALS_HEADER_KEY); if (credentials == null) { throw requiredHeaderMissingException(CROSS_CLUSTER_ACCESS_CREDENTIALS_HEADER_KEY); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4ServerTransport.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4ServerTransport.java index ffd7437eab2e5..f930f6dbe04f2 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4ServerTransport.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4ServerTransport.java @@ -21,6 +21,7 @@ import org.elasticsearch.transport.netty4.SharedGroupFactory; import org.elasticsearch.xpack.core.security.transport.netty4.SecurityNetty4Transport; import org.elasticsearch.xpack.core.ssl.SSLService; +import org.elasticsearch.xpack.security.authc.CrossClusterAccessAuthenticationService; import org.elasticsearch.xpack.security.transport.filter.IPFilter; public class SecurityNetty4ServerTransport extends SecurityNetty4Transport { @@ -38,7 +39,8 @@ public SecurityNetty4ServerTransport( final CircuitBreakerService circuitBreakerService, @Nullable final IPFilter authenticator, final SSLService sslService, - final SharedGroupFactory sharedGroupFactory + final SharedGroupFactory sharedGroupFactory, + final CrossClusterAccessAuthenticationService crossClusterAccessAuthenticationService ) { super( settings, @@ -49,7 +51,8 @@ public SecurityNetty4ServerTransport( namedWriteableRegistry, circuitBreakerService, sslService, - sharedGroupFactory + sharedGroupFactory, + crossClusterAccessAuthenticationService ); this.authenticator = authenticator; } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HeaderSizeLimitTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HeaderSizeLimitTests.java index 12198b67af88a..0c754e482f89f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HeaderSizeLimitTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HeaderSizeLimitTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.transport.netty4.SharedGroupFactory; import org.elasticsearch.xpack.core.XPackSettings; import
org.elasticsearch.xpack.core.ssl.SSLService; +import org.elasticsearch.xpack.security.authc.CrossClusterAccessAuthenticationService; import org.junit.After; import org.junit.Before; @@ -89,7 +90,8 @@ public void startThreadPool() { new NoneCircuitBreakerService(), null, mock(SSLService.class), - new SharedGroupFactory(settings) + new SharedGroupFactory(settings), + mock(CrossClusterAccessAuthenticationService.class) ); requestIdReceived = new AtomicLong(-1L); securityNettyTransport.setMessageListener(new TransportMessageListener() { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4ServerTransportAuthenticationTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4ServerTransportAuthenticationTests.java new file mode 100644 index 0000000000000..c8939645b34b5 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4ServerTransportAuthenticationTests.java @@ -0,0 +1,397 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security.transport.netty4; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.ElasticsearchSecurityException; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.cluster.remote.RemoteClusterNodesAction; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.cluster.node.VersionInformation; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.RecyclerBytesStreamOutput; +import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.recycler.Recycler; +import org.elasticsearch.common.settings.MockSecureSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.core.IOUtils; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.mocksocket.MockSocket; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.NodeRoles; +import org.elasticsearch.test.transport.MockTransportService; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.BytesRefRecycler; +import org.elasticsearch.transport.Compression; +import org.elasticsearch.transport.ProxyConnectionStrategy; +import org.elasticsearch.transport.RemoteClusterPortSettings; +import org.elasticsearch.transport.RemoteClusterService; +import org.elasticsearch.transport.RemoteConnectionStrategy; +import org.elasticsearch.transport.RemoteTransportException; +import org.elasticsearch.transport.SniffConnectionStrategy; +import org.elasticsearch.transport.TestOutboundRequestMessage; +import org.elasticsearch.transport.TransportInterceptor; +import org.elasticsearch.transport.TransportRequest; +import 
org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.netty4.SharedGroupFactory; +import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.core.ssl.SSLService; +import org.elasticsearch.xpack.security.authc.CrossClusterAccessAuthenticationService; +import org.junit.After; +import org.junit.Before; + +import java.net.InetSocketAddress; +import java.net.Socket; +import java.net.SocketTimeoutException; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; + +import static org.elasticsearch.test.ActionListenerUtils.anyActionListener; +import static org.elasticsearch.test.NodeRoles.onlyRole; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; + +public class SecurityNetty4ServerTransportAuthenticationTests extends ESTestCase { + + private ThreadPool threadPool; + // when non-null, authn fails with this exception + private AtomicReference<Exception> authenticationException; + private String remoteClusterName; + private SecurityNetty4ServerTransport remoteSecurityNetty4ServerTransport; + private MockTransportService remoteTransportService; + + @SuppressWarnings("unchecked") + @Override + @Before + public void setUp() throws Exception { + super.setUp(); + threadPool = new TestThreadPool(getClass().getName()); + authenticationException = new AtomicReference<>(); + remoteClusterName = "test-remote_cluster_service_" + randomAlphaOfLength(8); + Settings remoteSettings = Settings.builder() + .put("node.name", getClass().getName()) + .put(ClusterName.CLUSTER_NAME_SETTING.getKey(), remoteClusterName) + .put(XPackSettings.TRANSPORT_SSL_ENABLED.getKey(), "false") + .put(XPackSettings.REMOTE_CLUSTER_SERVER_SSL_ENABLED.getKey(), "false") + .put(XPackSettings.REMOTE_CLUSTER_CLIENT_SSL_ENABLED.getKey(), "false") + .put(RemoteClusterPortSettings.REMOTE_CLUSTER_SERVER_ENABLED.getKey(), "true") + .put(RemoteClusterPortSettings.PORT.getKey(), 0) + .put("transport.ignore_deserialization_errors", true) + .build(); + remoteSettings = NodeRoles.nonRemoteClusterClientNode(remoteSettings); + CrossClusterAccessAuthenticationService remoteCrossClusterAccessAuthenticationService = mock( + CrossClusterAccessAuthenticationService.class + ); + doAnswer(invocation -> { + Exception authnException = authenticationException.get(); + if (authnException != null) { + ((ActionListener<Void>) invocation.getArguments()[1]).onFailure(authnException); + } else { + ((ActionListener<Void>) invocation.getArguments()[1]).onResponse(null); + } + return null; + }).when(remoteCrossClusterAccessAuthenticationService).tryAuthenticate(any(Map.class), anyActionListener()); + remoteSecurityNetty4ServerTransport = new SecurityNetty4ServerTransport( + remoteSettings, + TransportVersion.current(), + threadPool, + new NetworkService(List.of()), + PageCacheRecycler.NON_RECYCLING_INSTANCE, + new NamedWriteableRegistry(List.of()), + new NoneCircuitBreakerService(), + null, + mock(SSLService.class), + new SharedGroupFactory(remoteSettings), + remoteCrossClusterAccessAuthenticationService + ); + remoteTransportService = MockTransportService.createNewService(
remoteSettings, + remoteSecurityNetty4ServerTransport, + VersionInformation.CURRENT, + threadPool, + null, + Collections.emptySet(), + // IMPORTANT: we have to mock authentication in two places: one in the "CrossClusterAccessAuthenticationService" and the + // other before the action handler here. This is in order to accurately simulate the complete Elasticsearch node behavior. + new TransportInterceptor() { + @Override + public <T extends TransportRequest> TransportRequestHandler<T> interceptHandler( + String action, + String executor, + boolean forceExecution, + TransportRequestHandler<T> actualHandler + ) { + return (request, channel, task) -> { + Exception authnException = authenticationException.get(); + if (authnException != null) { + channel.sendResponse(authnException); + } else { + actualHandler.messageReceived(request, channel, task); + } + }; + } + } + ); + DiscoveryNode remoteNode = remoteTransportService.getLocalDiscoNode(); + remoteTransportService.registerRequestHandler( + RemoteClusterNodesAction.NAME, + ThreadPool.Names.SAME, + RemoteClusterNodesAction.Request::new, + (request, channel, task) -> channel.sendResponse(new RemoteClusterNodesAction.Response(List.of(remoteNode))) + ); + remoteTransportService.start(); + remoteTransportService.acceptIncomingRequests(); + } + + @Override + @After + public void tearDown() throws Exception { + logger.info("tearDown"); + super.tearDown(); + IOUtils.close( + remoteTransportService, + remoteSecurityNetty4ServerTransport, + () -> ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS) + ); + } + + public void testProxyStrategyConnectionClosesWhenAuthenticatorAlwaysFails() throws Exception { + // all requests fail authn + authenticationException.set(new ElasticsearchSecurityException("authn failure")); + try ( + MockTransportService localService = MockTransportService.createNewService( + proxyLocalTransportSettings(), + VersionInformation.CURRENT, + TransportVersion.current(), + threadPool + ) + ) { + localService.start(); + // all attempts to obtain a connection will fail + for (int i = 0; i < randomIntBetween(2, 4); i++) { + CountDownLatch connectionTestDone = new CountDownLatch(1); + // {@code RemoteClusterService.REMOTE_CLUSTER_HANDSHAKE_ACTION_NAME} fails authn (both of them) and the connection is + // always closed after receiving an error response + localService.getRemoteClusterService() + .maybeEnsureConnectedAndGetConnection(remoteClusterName, true, ActionListener.wrap(connection -> { + logger.info("Unexpected: a connection is available"); + connectionTestDone.countDown(); + fail("No connection should be available if authn fails"); + }, e -> { + logger.info("Expected: no connection could be established"); + connectionTestDone.countDown(); + assertThat(e, instanceOf(RemoteTransportException.class)); + assertThat(e.getCause(), instanceOf(authenticationException.get().getClass())); + })); + assertTrue(connectionTestDone.await(10L, TimeUnit.SECONDS)); + } + } + // but if authn passes, valid connections are available + authenticationException.set(null); + try ( + MockTransportService localService = MockTransportService.createNewService( + proxyLocalTransportSettings(), + VersionInformation.CURRENT, + TransportVersion.current(), + threadPool + ) + ) { + localService.start(); + CountDownLatch connectionTestDone = new CountDownLatch(1); + localService.getRemoteClusterService() + .maybeEnsureConnectedAndGetConnection(remoteClusterName, true, ActionListener.wrap(connection -> { + logger.info("Expected: a connection is available"); + connectionTestDone.countDown();
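+ // authn was mocked to succeed, so the proxy-mode handshake completes and a usable remote connection is returned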
}, e -> { + logger.info("Unexpected: no connection could be established"); + connectionTestDone.countDown(); + fail("connection could not be established"); + throw new RuntimeException(e); + })); + assertTrue(connectionTestDone.await(10L, TimeUnit.SECONDS)); + } + } + + public void testSniffStrategyNoConnectionWhenAuthenticatorAlwaysFails() throws Exception { + // all requests fail authn + authenticationException.set(new ElasticsearchSecurityException("authn failure")); + try ( + MockTransportService localService = MockTransportService.createNewService( + sniffLocalTransportSettings(), + VersionInformation.CURRENT, + TransportVersion.current(), + threadPool + ) + ) { + localService.start(); + // obtain some connections and check that they'll be promptly closed + for (int i = 0; i < randomIntBetween(2, 4); i++) { + CountDownLatch connectionTestDone = new CountDownLatch(1); + // the failed authentication during handshake must surely close the connection before + // {@code RemoteClusterNodesAction.NAME} is executed, so node sniffing will fail + localService.getRemoteClusterService() + .maybeEnsureConnectedAndGetConnection(remoteClusterName, true, ActionListener.wrap(connection -> { + logger.info("Unexpected: a connection is available"); + connectionTestDone.countDown(); + fail("No connection should be available if authn fails"); + }, e -> { + logger.info("Expected: no connection could be established"); + connectionTestDone.countDown(); + assertThat(e, instanceOf(RemoteTransportException.class)); + assertThat(e.getCause(), instanceOf(authenticationException.get().getClass())); + })); + assertTrue(connectionTestDone.await(10L, TimeUnit.SECONDS)); + } + } + // but if authn passes, valid connections are available + authenticationException.set(null); + try ( + MockTransportService localService = MockTransportService.createNewService( + sniffLocalTransportSettings(), + VersionInformation.CURRENT, + TransportVersion.current(), + threadPool + ) + ) { + localService.start(); + CountDownLatch connectionTestDone = new CountDownLatch(1); + localService.getRemoteClusterService() + .maybeEnsureConnectedAndGetConnection(remoteClusterName, true, ActionListener.wrap(connection -> { + logger.info("Expected: a connection is available"); + connectionTestDone.countDown(); + }, e -> { + logger.info("Unexpected: no connection could be established"); + connectionTestDone.countDown(); + fail("connection could not be established"); + throw new RuntimeException(e); + })); + assertTrue(connectionTestDone.await(10L, TimeUnit.SECONDS)); + } + } + + public void testConnectionWorksForPing() throws Exception { + authenticationException.set(new ElasticsearchSecurityException("authn failure")); + TransportAddress[] boundRemoteIngressAddresses = remoteSecurityNetty4ServerTransport.boundRemoteIngressAddress().boundAddresses(); + InetSocketAddress remoteIngressTransportAddress = randomFrom(boundRemoteIngressAddresses).address(); + // ping message + final BytesStreamOutput bytesStreamOutput = new BytesStreamOutput(); + bytesStreamOutput.writeBytes(new byte[] { (byte) 'E', (byte) 'S' }); + bytesStreamOutput.writeInt(-1); + try (Socket socket = new MockSocket(remoteIngressTransportAddress.getAddress(), remoteIngressTransportAddress.getPort())) { + final byte[] pingBytes = Arrays.copyOfRange(bytesStreamOutput.bytes().array(), 0, 6); + socket.getOutputStream().write(pingBytes); + socket.getOutputStream().flush(); + // We should receive the ping back + final byte[] responseBytes = socket.getInputStream().readNBytes(6); + 
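+ // the 6-byte ping frame is the 'ES' marker followed by the length -1; the server echoes pings even on a channel that never authenticates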
assertThat(responseBytes, equalTo(pingBytes)); + try { + socket.setSoTimeout(1000); + socket.getInputStream().read(); + fail("should not reach here"); + } catch (SocketTimeoutException e) { + // a timeout means the server is still connected; it is just not sending data, which is normal + } + } + } + + public void testConnectionDisconnectedWhenAuthnFails() throws Exception { + authenticationException.set(new ElasticsearchSecurityException("authn failure")); + TransportAddress[] boundRemoteIngressAddresses = remoteSecurityNetty4ServerTransport.boundRemoteIngressAddress().boundAddresses(); + InetSocketAddress remoteIngressTransportAddress = randomFrom(boundRemoteIngressAddresses).address(); + try (Socket socket = new MockSocket(remoteIngressTransportAddress.getAddress(), remoteIngressTransportAddress.getPort())) { + TestOutboundRequestMessage message = new TestOutboundRequestMessage( + threadPool.getThreadContext(), + TransportRequest.Empty.INSTANCE, + TransportVersion.current(), + "internal:whatever", + randomNonNegativeLong(), + false, + randomFrom(Compression.Scheme.DEFLATE, Compression.Scheme.LZ4, null) + ); + Recycler<BytesRef> recycler = new BytesRefRecycler(PageCacheRecycler.NON_RECYCLING_INSTANCE); + RecyclerBytesStreamOutput out = new RecyclerBytesStreamOutput(recycler); + BytesReference bytesReference = message.serialize(out); + socket.getOutputStream().write(Arrays.copyOfRange(bytesReference.array(), 0, bytesReference.length())); + socket.getOutputStream().flush(); + + final String response = new String(socket.getInputStream().readAllBytes(), StandardCharsets.UTF_8); + assertThat(response, containsString("authn failure")); + // -1 means the other side has disconnected + assertThat(socket.getInputStream().read(), equalTo(-1)); + } + } + + private Settings sniffLocalTransportSettings() { + Settings localSettings = Settings.builder() + .put(onlyRole(DiscoveryNodeRole.REMOTE_CLUSTER_CLIENT_ROLE)) + .put(RemoteConnectionStrategy.REMOTE_CONNECTION_MODE.getConcreteSettingForNamespace(remoteClusterName).getKey(), "sniff") + .put( + SniffConnectionStrategy.REMOTE_CLUSTER_SEEDS.getConcreteSettingForNamespace(remoteClusterName).getKey(), + remoteTransportService.boundRemoteAccessAddress().publishAddress().toString() + ) + .put( + SniffConnectionStrategy.REMOTE_CONNECTIONS_PER_CLUSTER.getKey(), + randomIntBetween(1, 3) // easier to debug with just 1 connection + ) + .put( + SniffConnectionStrategy.REMOTE_NODE_CONNECTIONS.getConcreteSettingForNamespace(remoteClusterName).getKey(), + randomIntBetween(1, 3) // easier to debug with just 1 connection + ) + .build(); + { + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString( + RemoteClusterService.REMOTE_CLUSTER_CREDENTIALS.getConcreteSettingForNamespace(remoteClusterName).getKey(), + randomAlphaOfLength(20) + ); + return Settings.builder().put(localSettings).setSecureSettings(secureSettings).build(); + } + } + + private Settings proxyLocalTransportSettings() { + Settings localSettings = Settings.builder() + .put(onlyRole(DiscoveryNodeRole.REMOTE_CLUSTER_CLIENT_ROLE)) + .put(RemoteConnectionStrategy.REMOTE_CONNECTION_MODE.getConcreteSettingForNamespace(remoteClusterName).getKey(), "proxy") + .put( + ProxyConnectionStrategy.PROXY_ADDRESS.getConcreteSettingForNamespace(remoteClusterName).getKey(), + remoteTransportService.boundRemoteAccessAddress().publishAddress().toString() + ) + .put( + ProxyConnectionStrategy.REMOTE_SOCKET_CONNECTIONS.getConcreteSettingForNamespace(remoteClusterName).getKey(),
randomIntBetween(1, 3) // easier to debug with just 1 connection + ) + .build(); + { + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString( + RemoteClusterService.REMOTE_CLUSTER_CREDENTIALS.getConcreteSettingForNamespace(remoteClusterName).getKey(), + randomAlphaOfLength(20) + ); + return Settings.builder().put(localSettings).setSecureSettings(secureSettings).build(); + } + } + +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java index 18cf0418e0942..2cfeb154693b3 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java @@ -58,6 +58,7 @@ import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.common.socket.SocketAccess; import org.elasticsearch.xpack.core.ssl.SSLService; +import org.elasticsearch.xpack.security.authc.CrossClusterAccessAuthenticationService; import org.elasticsearch.xpack.security.transport.SSLEngineUtils; import org.elasticsearch.xpack.security.transport.filter.IPFilter; @@ -107,6 +108,7 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.startsWith; +import static org.mockito.Mockito.mock; public class SimpleSecurityNetty4ServerTransportTests extends AbstractSimpleTransportTestCase { @Override @@ -1050,7 +1052,8 @@ static class TestSecurityNetty4ServerTransport extends SecurityNetty4ServerTrans circuitBreakerService, authenticator, sslService, - sharedGroupFactory + sharedGroupFactory, + mock(CrossClusterAccessAuthenticationService.class) ); this.doHandshake = doHandshake; } diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/GeoLineAggregatorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/GeoLineAggregatorTests.java index 183a663435e98..2d6bbaf8ccd97 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/GeoLineAggregatorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/GeoLineAggregatorTests.java @@ -14,7 +14,6 @@ import org.apache.lucene.geo.GeoEncodingUtils; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortedNumericSortField; @@ -959,9 +958,7 @@ private void testCase( DirectoryReader unwrapped = DirectoryReader.open(directory); DirectoryReader indexReader = wrapDirectoryReader(unwrapped) ) { - IndexSearcher indexSearcher = newIndexSearcher(indexReader); - - A terms = (A) searchAndReduce(indexSearcher, aggTestConfig); + A terms = (A) searchAndReduce(indexReader, aggTestConfig); verify.accept(terms); } } diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianBoundsAggregatorTests.java 
b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianBoundsAggregatorTests.java index 72746fd0868ff..8be04619b5d6f 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianBoundsAggregatorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianBoundsAggregatorTests.java @@ -12,7 +12,6 @@ import org.apache.lucene.document.XYDocValuesField; import org.apache.lucene.geo.XYEncodingUtils; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.ElasticsearchParseException; @@ -51,8 +50,7 @@ public void testEmpty() throws Exception { MappedFieldType fieldType = new PointFieldMapper.PointFieldType("field"); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalCartesianBounds bounds = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalCartesianBounds bounds = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertTrue(Double.isInfinite(bounds.top)); assertTrue(Double.isInfinite(bounds.bottom)); assertTrue(Double.isInfinite(bounds.left)); @@ -74,8 +72,7 @@ public void testUnmappedFieldWithDocs() throws Exception { MappedFieldType fieldType = new PointFieldMapper.PointFieldType("field"); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalCartesianBounds bounds = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalCartesianBounds bounds = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertTrue(Double.isInfinite(bounds.top)); assertTrue(Double.isInfinite(bounds.bottom)); assertTrue(Double.isInfinite(bounds.left)); @@ -108,8 +105,7 @@ private void readAndAssertMissing(RandomIndexWriter w, Object missingVal, float String description = "Bounds aggregation with missing=" + missingVal; try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalCartesianBounds bounds = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalCartesianBounds bounds = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertThat(description + ": top", bounds.top, closeTo(y, GEOHASH_TOLERANCE)); assertThat(description + ": bottom", bounds.bottom, closeTo(y, GEOHASH_TOLERANCE)); assertThat(description + ": left", bounds.left, closeTo(x, GEOHASH_TOLERANCE)); @@ -128,10 +124,9 @@ public void testInvalidMissing() throws Exception { CartesianBoundsAggregationBuilder aggBuilder = new CartesianBoundsAggregationBuilder("my_agg").field("field") .missing("invalid"); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); ElasticsearchParseException exception = expectThrows( ElasticsearchParseException.class, - () -> searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)) + () -> searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)) ); assertThat(exception.getMessage(), startsWith("unsupported symbol")); } @@ -166,8 +161,7 @@ public void testRandom() throws Exception { MappedFieldType fieldType = new PointFieldMapper.PointFieldType("field"); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalCartesianBounds bounds = 
searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalCartesianBounds bounds = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertCloseTo("top", numDocs, bounds.top, top); assertCloseTo("bottom", numDocs, bounds.bottom, bottom); assertCloseTo("left", numDocs, bounds.left, left); diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianCentroidAggregatorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianCentroidAggregatorTests.java index 05b890bcf0a70..6079452c9ca72 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianCentroidAggregatorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianCentroidAggregatorTests.java @@ -11,7 +11,6 @@ import org.apache.lucene.document.LatLonDocValuesField; import org.apache.lucene.document.XYDocValuesField; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.common.geo.SpatialPoint; @@ -44,8 +43,7 @@ public void testEmpty() throws Exception { MappedFieldType fieldType = new PointFieldMapper.PointFieldType("field"); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalCartesianCentroid result = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalCartesianCentroid result = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertNull(result.centroid()); assertFalse(AggregationInspectionHelper.hasValue(result)); } @@ -60,14 +58,13 @@ public void testUnmapped() throws Exception { document.add(new LatLonDocValuesField("field", 10, 10)); w.addDocument(document); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); MappedFieldType fieldType = new PointFieldMapper.PointFieldType("another_field"); - InternalCartesianCentroid result = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalCartesianCentroid result = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertNull(result.centroid()); fieldType = new PointFieldMapper.PointFieldType("another_field"); - result = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + result = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertNull(result.centroid()); assertFalse(AggregationInspectionHelper.hasValue(result)); } @@ -84,10 +81,8 @@ public void testUnmappedWithMissing() throws Exception { document.add(new LatLonDocValuesField("field", 10, 10)); w.addDocument(document); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - MappedFieldType fieldType = new PointFieldMapper.PointFieldType("another_field"); - InternalCartesianCentroid result = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalCartesianCentroid result = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertEquals(expectedCentroid, result.centroid()); assertTrue(AggregationInspectionHelper.hasValue(result)); } @@ -153,8 +148,7 @@ private void assertCentroid(RandomIndexWriter w, CartesianPoint expectedCentroid MappedFieldType fieldType = new PointFieldMapper.PointFieldType("field"); 
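// searchAndReduce now accepts the IndexReader directly; the aggregation test harness creates the searcher internally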
CartesianCentroidAggregationBuilder aggBuilder = new CartesianCentroidAggregationBuilder("my_agg").field("field"); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalCartesianCentroid result = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalCartesianCentroid result = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertEquals("my_agg", result.getName()); SpatialPoint centroid = result.centroid(); diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianShapeBoundsAggregatorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianShapeBoundsAggregatorTests.java index 7b91c78a01534..f2ceea6c2e87c 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianShapeBoundsAggregatorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianShapeBoundsAggregatorTests.java @@ -12,7 +12,6 @@ import org.apache.lucene.document.XYDocValuesField; import org.apache.lucene.geo.XYEncodingUtils; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.common.Randomness; @@ -75,8 +74,7 @@ public void testEmpty() throws Exception { Collections.emptyMap() ); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalCartesianBounds bounds = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalCartesianBounds bounds = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertTrue(Double.isInfinite(bounds.top)); assertTrue(Double.isInfinite(bounds.bottom)); assertTrue(Double.isInfinite(bounds.left)); @@ -105,8 +103,7 @@ public void testUnmappedFieldWithDocs() throws Exception { Collections.emptyMap() ); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalCartesianBounds bounds = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalCartesianBounds bounds = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertTrue(Double.isInfinite(bounds.top)); assertTrue(Double.isInfinite(bounds.bottom)); assertTrue(Double.isInfinite(bounds.left)); @@ -140,8 +137,7 @@ public void testMissing() throws Exception { .missing(missingVal); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalCartesianBounds bounds = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalCartesianBounds bounds = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertThat(bounds.top, equalTo(y)); assertThat(bounds.bottom, equalTo(y)); assertThat(bounds.left, equalTo(x)); @@ -168,10 +164,9 @@ public void testInvalidMissing() throws Exception { CartesianBoundsAggregationBuilder aggBuilder = new CartesianBoundsAggregationBuilder("my_agg").field("field") .missing("invalid"); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, - () -> searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)) + () -> searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)) ); 
assertThat(exception.getMessage(), startsWith("Unknown geometry type")); } @@ -256,8 +251,7 @@ private void readAndAssertExtent(RandomIndexWriter w, TestPointCollection points Collections.emptyMap() ); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalCartesianBounds bounds = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalCartesianBounds bounds = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertThat(description + ": top", bounds.top, closeTo(points.top, GEOHASH_TOLERANCE)); assertThat(description + ": bottom", bounds.bottom, closeTo(points.bottom, GEOHASH_TOLERANCE)); assertThat(description + ": left", bounds.left, closeTo(points.left, GEOHASH_TOLERANCE)); diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianShapeCentroidAggregatorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianShapeCentroidAggregatorTests.java index 2c9454558b1c2..d8f00edd7873d 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianShapeCentroidAggregatorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianShapeCentroidAggregatorTests.java @@ -10,7 +10,6 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.XYPointField; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.common.geo.Orientation; @@ -62,8 +61,7 @@ public void testEmpty() throws Exception { Collections.emptyMap() ); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalCartesianCentroid result = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalCartesianCentroid result = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertNull(result.centroid()); assertFalse(AggregationInspectionHelper.hasValue(result)); } @@ -78,8 +76,6 @@ public void testUnmapped() throws Exception { document.add(new XYPointField("field", 10, 10)); w.addDocument(document); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - MappedFieldType fieldType = new ShapeFieldMapper.ShapeFieldType( "another_field", true, @@ -88,11 +84,11 @@ public void testUnmapped() throws Exception { null, Collections.emptyMap() ); - InternalCartesianCentroid result = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalCartesianCentroid result = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertNull(result.centroid()); fieldType = new ShapeFieldMapper.ShapeFieldType("field", true, true, Orientation.RIGHT, null, Collections.emptyMap()); - result = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + result = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertNull(result.centroid()); assertFalse(AggregationInspectionHelper.hasValue(result)); } @@ -110,8 +106,6 @@ public void testUnmappedWithMissing() throws Exception { document.add(new XYPointField("field", 10, 10)); w.addDocument(document); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - MappedFieldType fieldType = new 
ShapeFieldMapper.ShapeFieldType( "another_field", true, @@ -120,7 +114,7 @@ public void testUnmappedWithMissing() throws Exception { null, Collections.emptyMap() ); - InternalCartesianCentroid result = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalCartesianCentroid result = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertThat(result.centroid(), equalTo(expectedCentroid)); assertTrue(AggregationInspectionHelper.hasValue(result)); } @@ -194,8 +188,7 @@ private void assertCentroid(RandomIndexWriter w, CartesianPoint expectedCentroid ); CartesianCentroidAggregationBuilder aggBuilder = new CartesianCentroidAggregationBuilder("my_agg").field("field"); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalCartesianCentroid result = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalCartesianCentroid result = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertEquals("my_agg", result.getName()); SpatialPoint centroid = result.centroid(); diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeBoundsAggregatorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeBoundsAggregatorTests.java index 33f958616e963..9f091fa2f4c5d 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeBoundsAggregatorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeBoundsAggregatorTests.java @@ -12,7 +12,6 @@ import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.geo.GeoEncodingUtils; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.common.geo.Orientation; @@ -65,8 +64,7 @@ public void testEmpty() throws Exception { Collections.emptyMap() ); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalGeoBounds bounds = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalGeoBounds bounds = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertTrue(Double.isInfinite(bounds.top)); assertTrue(Double.isInfinite(bounds.bottom)); assertTrue(Double.isInfinite(bounds.posLeft)); @@ -99,8 +97,7 @@ public void testUnmappedFieldWithDocs() throws Exception { Collections.emptyMap() ); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalGeoBounds bounds = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalGeoBounds bounds = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertTrue(Double.isInfinite(bounds.top)); assertTrue(Double.isInfinite(bounds.bottom)); assertTrue(Double.isInfinite(bounds.posLeft)); @@ -139,8 +136,7 @@ public void testMissing() throws Exception { .wrapLongitude(false); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalGeoBounds bounds = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalGeoBounds bounds = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertThat(bounds.top, equalTo(lat)); assertThat(bounds.bottom, equalTo(lat)); 
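// posLeft/negLeft track the western bound separately for positive and negative longitudes, so a negative-longitude point leaves posLeft at +Infinity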
assertThat(bounds.posLeft, equalTo(lon >= 0 ? lon : Double.POSITIVE_INFINITY)); @@ -172,9 +168,8 @@ public void testInvalidMissing() throws Exception { .missing("invalid") .wrapLongitude(false); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> { - searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); }); assertThat(exception.getMessage(), startsWith("Unknown geometry type")); } @@ -233,8 +228,7 @@ public void testRandomShapes() throws Exception { Collections.emptyMap() ); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalGeoBounds bounds = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalGeoBounds bounds = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertThat(bounds.top, closeTo(top, GEOHASH_TOLERANCE)); assertThat(bounds.bottom, closeTo(bottom, GEOHASH_TOLERANCE)); assertThat(bounds.posLeft, closeTo(posLeft, GEOHASH_TOLERANCE)); diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeCentroidAggregatorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeCentroidAggregatorTests.java index 22abd561c2b2f..b477693662eda 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeCentroidAggregatorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeCentroidAggregatorTests.java @@ -11,7 +11,6 @@ import org.apache.lucene.document.LatLonDocValuesField; import org.apache.lucene.geo.GeoEncodingUtils; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.common.geo.GeoPoint; @@ -70,8 +69,7 @@ public void testEmpty() throws Exception { Map.of() ); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalGeoCentroid result = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalGeoCentroid result = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertNull(result.centroid()); assertFalse(AggregationInspectionHelper.hasValue(result)); } @@ -86,8 +84,6 @@ public void testUnmapped() throws Exception { document.add(new LatLonDocValuesField("field", 10, 10)); w.addDocument(document); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - MappedFieldType fieldType = new GeoShapeWithDocValuesFieldType( "another_field", true, @@ -98,7 +94,7 @@ public void testUnmapped() throws Exception { null, Map.of() ); - InternalGeoCentroid result = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalGeoCentroid result = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertNull(result.centroid()); fieldType = new GeoShapeWithDocValuesFieldType( @@ -111,7 +107,7 @@ public void testUnmapped() throws Exception { null, Map.of() ); - result = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + result = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertNull(result.centroid()); 
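// an unmapped field produces a null centroid and the aggregation reports no value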
assertFalse(AggregationInspectionHelper.hasValue(result)); } @@ -130,8 +126,6 @@ public void testUnmappedWithMissing() throws Exception { document.add(new LatLonDocValuesField("field", 10, 10)); w.addDocument(document); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - MappedFieldType fieldType = new GeoShapeWithDocValuesFieldType( "another_field", true, @@ -142,7 +136,7 @@ public void testUnmappedWithMissing() throws Exception { null, Map.of() ); - InternalGeoCentroid result = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalGeoCentroid result = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertThat(result.centroid(), equalTo(expectedCentroid)); assertTrue(AggregationInspectionHelper.hasValue(result)); } @@ -219,8 +213,7 @@ private void assertCentroid(RandomIndexWriter w, GeoPoint expectedCentroid) thro ); GeoCentroidAggregationBuilder aggBuilder = new GeoCentroidAggregationBuilder("my_agg").field("field"); try (IndexReader reader = w.getReader()) { - IndexSearcher searcher = newSearcher(reader); - InternalGeoCentroid result = searchAndReduce(searcher, new AggTestConfig(aggBuilder, fieldType)); + InternalGeoCentroid result = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)); assertEquals("my_agg", result.getName()); SpatialPoint centroid = result.centroid();
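The `getInboundPipeline` override earlier in this patch pauses Netty's auto-read while the asynchronous credential check runs, then resumes it whatever the outcome, so no further request bytes are buffered for an unauthenticated channel. The following is a minimal sketch of that pattern under simplified assumptions; the `EagerHeaderAuthenticator` and `AsyncAuthenticator` names are hypothetical and this is not the Elasticsearch implementation itself.

[source,java]
----
import java.util.Map;
import java.util.concurrent.CompletableFuture;

import io.netty.channel.Channel;

// Sketch only: pause the channel's reads while an async credential check
// runs, then resume reads or surface the failure on the event loop.
final class EagerHeaderAuthenticator {

    // Hypothetical async credential check; stands in for the patch's
    // CrossClusterAccessAuthenticationService#tryAuthenticate.
    interface AsyncAuthenticator {
        CompletableFuture<Void> authenticate(Map<String, String> requestHeaders);
    }

    private final AsyncAuthenticator authenticator;

    EagerHeaderAuthenticator(AsyncAuthenticator authenticator) {
        this.authenticator = authenticator;
    }

    void onHeader(Channel channel, Map<String, String> requestHeaders, boolean isHandshake) {
        if (isHandshake) {
            return; // handshakes carry no credentials and pass through
        }
        // stop reading further bytes until the async authentication settles
        channel.config().setAutoRead(false);
        authenticator.authenticate(requestHeaders).whenComplete((ignored, e) -> {
            // always resume reads; on failure the exception is fired on the
            // channel's own event loop, mirroring fireExceptionCaught(...) in
            // the pipeline override, and the error handling closes the channel
            channel.config().setAutoRead(true);
            if (e != null) {
                channel.eventLoop().submit(() -> channel.pipeline().fireExceptionCaught(e));
            }
        });
    }
}
----

Disabling auto-read rather than discarding bytes keeps back-pressure on the peer, which is why the tests above can assert that a failed authn closes the channel before any request payload is processed.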